/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"
/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
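
/* For instance, CEIL_ROUND (13, 8) yields (13 + 7) & ~7 == 16, and a
   VALUE already on an ALIGN boundary comes back unchanged:
   CEIL_ROUND (16, 8) == 16.  ALIGN must be a power of two for the
   mask arithmetic to be valid.  */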

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
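
/* As a worked example of the default threshold: for a two-argument
   function, the -Os limit is 1 + (3 * 2) / 2 == 4 insns, while the
   speed-optimizing limit is 8 * (8 + 2) == 80 insns.  */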

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
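
/* The GTY length option above tells the garbage collector that ENTRIES
   points to an array of NUM_ENTRIES elements, so the hard_reg and
   pseudo rtxes it holds stay live across collections.  */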

static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));

static rtvec initialize_for_inline	PARAMS ((tree));
static void note_modified_parmregs	PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls	PARAMS ((tree, struct inline_remap *,
						 rtvec));
static tree integrate_decl_tree		PARAMS ((tree,
						 struct inline_remap *));
static void subst_constants		PARAMS ((rtx *, rtx,
						 struct inline_remap *, int));
static void set_block_origin_self	PARAMS ((tree));
static void set_block_abstract_flags	PARAMS ((tree, int));
static void process_reg_param		PARAMS ((struct inline_remap *, rtx,
						 rtx));
void set_decl_abstract_flags		PARAMS ((tree, int));
static void mark_stores			PARAMS ((rtx, rtx, void *));
static void save_parm_insns		PARAMS ((rtx, rtx));
static void copy_insn_list		PARAMS ((rtx, struct inline_remap *,
						 rtx));
static void copy_insn_notes		PARAMS ((rtx, struct inline_remap *,
						 int));
static int compare_blocks		PARAMS ((const PTR, const PTR));
static int find_block			PARAMS ((const PTR, const PTR));

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable points to the struct function for the function being
   inlined.  */
static struct function *inlining = 0;

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
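
/* A typical use appears in copy_insn_list below, where a CODE_LABEL is
   remapped with

     copy = emit_label (get_label_from_map (map, CODE_LABEL_NUMBER (insn)));

   so only the labels actually referenced by the copied insns are ever
   created.  */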

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (fndecl)
     tree fndecl;
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return (*targetm.function_attribute_inlinable_p) (fndecl);
	}
    }

  return true;
}

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     tree fndecl;
{
  rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     MAX_INLINE_INSNS (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (MAX_INLINE_INSNS
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);

  int ninsns = 0;
  tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if (last && TREE_VALUE (last) != void_type_node)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
	       && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behavior.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
	return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (!function_attribute_inlinable_p (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
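
/* A sketch of how a caller might use the msgid convention; the actual
   call sites in the expanders may differ in detail:

     const char *msg = function_cannot_inline_p (fndecl);
     if (msg)
       warning_with_decl (fndecl, msg);

   Returning a msgid rather than issuing the diagnostic here lets the
   caller decide whether (and how) to report it.  */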

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Copy NODE (which must be a DECL; a PARM_DECL or RESULT_DECL is copied
   as an equivalent VAR_DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
	  && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
	  && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
	  && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
	{
	  invisiref = 1;
	  type = DECL_ARG_TYPE (decl);
	}
      else
	type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
	{
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
	  TREE_READONLY (copy) = TREE_READONLY (decl);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
	}
      else
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  TREE_READONLY (copy) = 1;
	  TREE_THIS_VOLATILE (copy) = 0;
	}
    }
  else
    {
      copy = copy_node (decl);
      (*lang_hooks.dup_lang_specific_decl) (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */
  if (! flag_no_inline)
    parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  if (! flag_no_inline)
    argvec = initialize_for_inline (fndecl);
  else
    argvec = NULL;

  /* Delete basic block notes created by early run of find_basic_block.
     The notes would be later used by find_basic_blocks to reuse the memory
     for basic_block structures on already freed obstack.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
      delete_related_insns (insn);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  if (! flag_no_inline)
    {
      /* Get the insn which signals the end of parameter setup code.  */
      first_nonparm_insn = get_first_nonparm_insn ();

      /* Now just scan the chain of insns to see what happens to our
	 PARM_DECLs.  If a PARM_DECL is used but never modified, we
	 can substitute its rtl directly when expanding inline (and
	 perform constant folding when its incoming value is
	 constant).  Otherwise, we have to copy its value into a new
	 register and track the new register's life.  */
      in_nonparm_insns = 0;
      save_parm_insns (insn, first_nonparm_insn);

      cfun->inl_max_label_num = max_label_num ();
      cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
      cfun->original_arg_vector = argvec;
    }
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  if (! flag_no_inline)
    free (parmdecl_map);
}

/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (INSN_P (insn))
	{
	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
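
/* FIXED_BASE_PLUS_P accepts an address of the form

     (plus (reg) (const_int N))

   where the register is one of the virtual registers (for example
   virtual_stack_vars_rtx), i.e. a constant offset from a fixed base,
   which is safe to record as a constant equivalence.  */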

/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}

/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  We cannot just subtract the
   two pointers, because the difference may not fit in an int.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) b1;
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
  rtvec arg_vector = inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (size_t) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (arg == error_mark_node
	  || mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (size_t) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

	  store_expr (arg, stack_slot, 0);
	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    {
	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
	      enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

	      pmode = promote_mode (TREE_TYPE (formal), pmode,
				    &unsignedp, 0);

	      if (GET_MODE (loc) != pmode)
		abort ();

	      /* The mode of LOC and ARG can differ if LOC was a variable
		 that had its mode promoted via PROMOTED_MODE.  */
	      arg_vals[i] = convert_modes (pmode,
					   TYPE_MODE (TREE_TYPE (arg)),
					   expand_expr (arg, NULL_RTX, mode,
							EXPAND_SUM),
					   unsignedp);
	    }
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));

	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
	 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      /* A virtual register might be invalid in an insn, because
		 it can cause trouble in reload.  Since we don't have access
		 to the expanders at map translation time, make sure we have
		 a proper register now.
		 If a virtual register is actually valid, cse or combine
		 can put it into the mapped insns.  */
	      if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
		  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
		temp = copy_to_mode_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else if (GET_CODE (loc) == CONCAT)
    {
      enum machine_mode departing_mode = TYPE_MODE (type);
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));

      if (departing_mode != arriving_mode)
	abort ();
      if (GET_CODE (XEXP (loc, 0)) != REG
	  || GET_CODE (XEXP (loc, 1)) != REG)
	abort ();

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	target = gen_reg_rtx (departing_mode);

      if (GET_CODE (target) != CONCAT)
	abort ();

      map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
      map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
    }
  else
    abort ();
  /* Remap the exception handler data pointer from one to the other.  */
  temp = get_exception_pointer (inl_f);
  if (temp)
    map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
	  (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Map pseudos used for initial hard reg values.  */
  setup_initial_hard_reg_value_integration (inl_f, map);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode, since
       the superblocks have not been created yet.  */
    (*lang_hooks.decls.insert_block) (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is nonzero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtx static_chain_mem = 0;

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* Similarly if an ignored return value is clobbered.  */
	  else if (map->inline_target == 0
		   && GET_CODE (pattern) == CLOBBER
		   && GET_CODE (XEXP (pattern, 0)) == REG
		   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    break;

	  /* Look for the address of the static chain slot.  The
	     rtx_equal_p comparisons against the
	     static_chain_incoming_rtx below may fail if the static
	     chain is in memory and the address specified is not
	     "legitimate".  This happens on Xtensa where the static
	     chain is at a negative offset from argp and where only
	     positive offsets are legitimate.  When the RTL is
	     generated, the address is "legitimized" by copying it
	     into a register, causing the rtx_equal_p comparisons to
	     fail.  This workaround looks for code that sets a
	     register to the address of the static chain.  Subsequent
	     memory references via that register can then be
	     identified as static chain references.  We assume that
	     the register is only assigned once, and that the static
	     chain address is only live in one register at a time.  */

	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (static_chain_incoming_rtx) == MEM
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_SRC (set),
				   XEXP (static_chain_incoming_rtx, 0)))
	    {
	      static_chain_mem =
		gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
			     SET_DEST (set));

	      /* Emit the instruction in case it is used for something
		 other than setting the static chain; if it's not used,
		 it can always be removed as dead code.  */
	      copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && (rtx_equal_p (SET_DEST (set),
				    static_chain_incoming_rtx)
		       || (static_chain_mem
			   && rtx_equal_p (SET_DEST (set), static_chain_mem))))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && (rtx_equal_p (SET_SRC (set),
				    static_chain_incoming_rtx)
		       || (static_chain_mem
			   && rtx_equal_p (SET_SRC (set), static_chain_mem))))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      if (GET_CODE (static_chain_incoming_rtx) != MEM)
		static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  INSN_SCOPE (copy) = INSN_SCOPE (insn);
	  break;

	case JUMP_INSN:
	  if (map->integrating && returnjump_p (insn))
	    {
	      if (map->local_return_label == 0)
		map->local_return_label = gen_label_rtx ();
	      pattern = gen_jump (map->local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);
	  INSN_SCOPE (copy) = INSN_SCOPE (insn);

	  /* If this used to be a conditional jump insn but whose branch
	     direction is now known, we must do something special.  */
	  if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (only_sets_cc0_p (PREV_INSN (copy)))
		delete_related_insns (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_related_insns (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  /* If this is a CALL_PLACEHOLDER insn then we need to copy the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      rtx sequence[3];
	      rtx tail_label;

	      for (i = 0; i < 3; i++)
		{
		  rtx seq;

		  sequence[i] = NULL_RTX;
		  seq = XEXP (PATTERN (insn), i);
		  if (seq)
		    {
		      start_sequence ();
		      copy_insn_list (seq, map, static_chain_value);
		      sequence[i] = get_insns ();
		      end_sequence ();
		    }
		}

	      /* Find the new tail recursion label.
		 It will already be substituted into sequence[2].  */
	      tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
						    map, 0);

	      copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
							       sequence[0],
							       sequence[1],
							       sequence[2],
							       tail_label));
	      break;
	    }

	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
	  CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
	  INSN_SCOPE (copy) = INSN_SCOPE (insn);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */

	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
	    {
	      copy = emit_label (get_label_from_map (map,
						     CODE_LABEL_NUMBER (insn)));
	      LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
	      map->const_age++;
	      break;
	    }

	  /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
	     discarded because it is important to have only one of
	     each in the current function.

	     NOTE_INSN_DELETED notes aren't useful.  */

	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
		  && NOTE_BLOCK (insn))
		{
		  tree *mapped_block_p;

		  mapped_block_p
		    = (tree *) bsearch (NOTE_BLOCK (insn),
					&VARRAY_TREE (map->block_map, 0),
					map->block_map->elements_used,
					sizeof (tree),
					find_block);

		  if (!mapped_block_p)
		    abort ();
		  else
		    NOTE_BLOCK (copy) = *mapped_block_p;
		}
	      else if (copy
		       && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
		NOTE_EXPECTED_VALUE (copy)
		  = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
					     map, 0);
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
}
1706 /* Copy the REG_NOTES. Increment const_age, so that only constants
1707 from parameters can be substituted in. These are the only ones
1708 that are valid across the entire function. */
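/* For instance (region numbers illustrative): an insn carrying a
   (REG_EH_REGION 2) note in the inlined body must carry
   (REG_EH_REGION 5) in a caller whose eh_region_offset is 3, because
   the callee's exception regions were renumbered when they were
   copied; REG_LABEL notes, by contrast, are simply deleted from the
   copy. */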
1710 static void
1711 copy_insn_notes (insns, map, eh_region_offset)
1712 rtx insns;
1713 struct inline_remap *map;
1714 int eh_region_offset;
1716 rtx insn, new_insn;
1718 map->const_age++;
1719 for (insn = insns; insn; insn = NEXT_INSN (insn))
1721 if (! INSN_P (insn))
1722 continue;
1724 new_insn = map->insn_map[INSN_UID (insn)];
1725 if (! new_insn)
1726 continue;
1728 if (REG_NOTES (insn))
1730 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1732 /* We must also do subst_constants, in case one of our parameters
1733 has const type and constant value. */
1734 subst_constants (&note, NULL_RTX, map, 0);
1735 apply_change_group ();
1736 REG_NOTES (new_insn) = note;
1738 /* Delete any REG_LABEL notes from the chain. Remap any
1739 REG_EH_REGION notes. */
1740 for (; note; note = next)
1742 next = XEXP (note, 1);
1743 if (REG_NOTE_KIND (note) == REG_LABEL)
1744 remove_note (new_insn, note);
1745 else if (REG_NOTE_KIND (note) == REG_EH_REGION
1746 && INTVAL (XEXP (note, 0)) > 0)
1747 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1748 + eh_region_offset);
1752 if (GET_CODE (insn) == CALL_INSN
1753 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1755 int i;
1756 for (i = 0; i < 3; i++)
1757 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1760 if (GET_CODE (insn) == JUMP_INSN
1761 && GET_CODE (PATTERN (insn)) == RESX)
1762 XINT (PATTERN (new_insn), 0) += eh_region_offset;
1766 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1767 push all of those decls and give each one the corresponding home. */
1769 static void
1770 integrate_parm_decls (args, map, arg_vector)
1771 tree args;
1772 struct inline_remap *map;
1773 rtvec arg_vector;
1775 tree tail;
1776 int i;
1778 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1780 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1781 current_function_decl);
1782 rtx new_decl_rtl
1783 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1785 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1786 here, but that's going to require some more work. */
1787 /* DECL_INCOMING_RTL (decl) = ?; */
1788 /* Fully instantiate the address with the equivalent form so that the
1789 debugging information contains the actual register, instead of the
1790 virtual register. Do this by not passing an insn to
1791 subst_constants. */
1792 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1793 apply_change_group ();
1794 SET_DECL_RTL (decl, new_decl_rtl);
1798 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1799 current function a tree of contexts isomorphic to the one that is given.
1801 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1802 registers used in the DECL_RTL field should be remapped. If it is zero,
1803 no mapping is necessary. */
1805 static tree
1806 integrate_decl_tree (let, map)
1807 tree let;
1808 struct inline_remap *map;
1810 tree t;
1811 tree new_block;
1812 tree *next;
1814 new_block = make_node (BLOCK);
1815 VARRAY_PUSH_TREE (map->block_map, new_block);
1816 next = &BLOCK_VARS (new_block);
1818 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1820 tree d;
1822 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1824 if (DECL_RTL_SET_P (t))
1826 rtx r;
1828 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1830 /* Fully instantiate the address with the equivalent form so that the
1831 debugging information contains the actual register, instead of the
1832 virtual register. Do this by not passing an insn to
1833 subst_constants. */
1834 r = DECL_RTL (d);
1835 subst_constants (&r, NULL_RTX, map, 1);
1836 SET_DECL_RTL (d, r);
1838 if (GET_CODE (r) == REG)
1839 REGNO_DECL (REGNO (r)) = d;
1840 else if (GET_CODE (r) == CONCAT)
1842 REGNO_DECL (REGNO (XEXP (r, 0))) = d;
1843 REGNO_DECL (REGNO (XEXP (r, 1))) = d;
1846 apply_change_group ();
1849 /* Add this declaration to the list of variables in the new
1850 block. */
1851 *next = d;
1852 next = &TREE_CHAIN (d);
1855 next = &BLOCK_SUBBLOCKS (new_block);
1856 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1858 *next = integrate_decl_tree (t, map);
1859 BLOCK_SUPERCONTEXT (*next) = new_block;
1860 next = &BLOCK_CHAIN (*next);
1863 TREE_USED (new_block) = TREE_USED (let);
1864 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1866 return new_block;
1869 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1870 except for those few rtx codes that are sharable.
1872 We always return an rtx that is similar to the incoming rtx, with the
1873 exception of possibly changing a REG to a SUBREG or vice versa. No
1874 rtl is ever emitted.
1876 If FOR_LHS is nonzero, it means we are processing something that will
1877 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even when
1878 inlining, since we need to be conservative in how it is set for
1879 such cases.
1881 Handle constants that need to be placed in the constant pool by
1882 calling `force_const_mem'. */
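/* One illustrative instance of the REG-to-SUBREG exception (modes
   hypothetical): if the callee returns its value in an SImode hard
   register but map->inline_target is a DImode pseudo, the REG case
   below hands back gen_lowpart (SImode, map->inline_target), which
   may be a SUBREG of that pseudo. */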
1884 rtx
1885 copy_rtx_and_substitute (orig, map, for_lhs)
1886 rtx orig;
1887 struct inline_remap *map;
1888 int for_lhs;
1890 rtx copy, temp;
1891 int i, j;
1892 RTX_CODE code;
1893 enum machine_mode mode;
1894 const char *format_ptr;
1895 int regno;
1897 if (orig == 0)
1898 return 0;
1900 code = GET_CODE (orig);
1901 mode = GET_MODE (orig);
1903 switch (code)
1905 case REG:
1906 /* If the stack pointer register shows up, it must be part of
1907 stack-adjustments (*not* because we eliminated the frame pointer!).
1908 Small hard registers are returned as-is. Pseudo-registers
1909 go through their `reg_map'. */
1910 regno = REGNO (orig);
1911 if (regno <= LAST_VIRTUAL_REGISTER
1912 || (map->integrating
1913 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1915 /* Some hard registers are also mapped,
1916 but others are not translated. */
1917 if (map->reg_map[regno] != 0)
1918 return map->reg_map[regno];
1920 /* If this is the virtual frame pointer, make space in current
1921 function's stack frame for the stack frame of the inline function.
1923 Copy the address of this area into a pseudo. Map
1924 virtual_stack_vars_rtx to this pseudo and set up a constant
1925 equivalence for it to be the address. This will substitute the
1926 address into insns where it can be substituted and use the new
1927 pseudo where it can't. */
1928 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1930 rtx loc, seq;
1931 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1932 #ifdef FRAME_GROWS_DOWNWARD
1933 int alignment
1934 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1935 / BITS_PER_UNIT);
1937 /* In this case, virtual_stack_vars_rtx points to one byte
1938 higher than the top of the frame area. So make sure we
1939 allocate a big enough chunk to keep the frame pointer
1940 aligned like a real one. */
1941 if (alignment)
1942 size = CEIL_ROUND (size, alignment);
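/* For example, a 37-byte frame with stack_alignment_needed of 64 bits
   rounds up to CEIL_ROUND (37, 8) == ((37 + 7) & ~7) == 40 bytes. */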
1943 #endif
1944 start_sequence ();
1945 loc = assign_stack_temp (BLKmode, size, 1);
1946 loc = XEXP (loc, 0);
1947 #ifdef FRAME_GROWS_DOWNWARD
1948 /* In this case, virtual_stack_vars_rtx points to one byte
1949 higher than the top of the frame area. So compute the offset
1950 to one byte higher than our substitute frame. */
1951 loc = plus_constant (loc, size);
1952 #endif
1953 map->reg_map[regno] = temp
1954 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1956 #ifdef STACK_BOUNDARY
1957 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1958 #endif
1960 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1962 seq = get_insns ();
1963 end_sequence ();
1964 emit_insn_after (seq, map->insns_at_start);
1965 return temp;
1967 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1968 || (map->integrating
1969 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1970 == orig)))
1972 /* Do the same for a block to contain any arguments referenced
1973 in memory. */
1974 rtx loc, seq;
1975 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1977 start_sequence ();
1978 loc = assign_stack_temp (BLKmode, size, 1);
1979 loc = XEXP (loc, 0);
1980 /* When arguments grow downward, the virtual incoming
1981 args pointer points to the top of the argument block,
1982 so the remapped location better do the same. */
1983 #ifdef ARGS_GROW_DOWNWARD
1984 loc = plus_constant (loc, size);
1985 #endif
1986 map->reg_map[regno] = temp
1987 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1989 #ifdef STACK_BOUNDARY
1990 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1991 #endif
1993 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1995 seq = get_insns ();
1996 end_sequence ();
1997 emit_insn_after (seq, map->insns_at_start);
1998 return temp;
2000 else if (REG_FUNCTION_VALUE_P (orig))
2002 /* This is a reference to the function return value. If
2003 the function doesn't have a return value, error. If the
2004 mode doesn't agree, and it isn't BLKmode, make a SUBREG. */
2005 if (map->inline_target == 0)
2007 if (rtx_equal_function_value_matters)
2008 /* This is an ignored return value. We must not
2009 leave it in with REG_FUNCTION_VALUE_P set, since
2010 that would confuse subsequent inlining of the
2011 current function into a later function. */
2012 return gen_rtx_REG (GET_MODE (orig), regno);
2013 else
2014 /* Must be unrolling loops or replicating code if we
2015 reach here, so return the register unchanged. */
2016 return orig;
2018 else if (GET_MODE (map->inline_target) != BLKmode
2019 && mode != GET_MODE (map->inline_target))
2020 return gen_lowpart (mode, map->inline_target);
2021 else
2022 return map->inline_target;
2024 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2025 /* If leaf_renumber_regs_insn() might remap this register to
2026 some other number, make sure we don't share it with the
2027 inlined function, otherwise delayed optimization of the
2028 inlined function may change it in place, breaking our
2029 reference to it. We may still share it within the
2030 function, so create an entry for this register in the
2031 reg_map. */
2032 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2033 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2035 if (!map->leaf_reg_map[regno][mode])
2036 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2037 return map->leaf_reg_map[regno][mode];
2039 #endif
2040 else
2041 return orig;
2043 abort ();
2045 if (map->reg_map[regno] == NULL)
2047 map->reg_map[regno] = gen_reg_rtx (mode);
2048 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2049 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2050 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2051 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2053 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2054 mark_reg_pointer (map->reg_map[regno],
2055 map->regno_pointer_align[regno]);
2057 return map->reg_map[regno];
2059 case SUBREG:
2060 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2061 return simplify_gen_subreg (GET_MODE (orig), copy,
2062 GET_MODE (SUBREG_REG (orig)),
2063 SUBREG_BYTE (orig));
2065 case ADDRESSOF:
2066 copy = gen_rtx_ADDRESSOF (mode,
2067 copy_rtx_and_substitute (XEXP (orig, 0),
2068 map, for_lhs),
2069 0, ADDRESSOF_DECL (orig));
2070 regno = ADDRESSOF_REGNO (orig);
2071 if (map->reg_map[regno])
2072 regno = REGNO (map->reg_map[regno]);
2073 else if (regno > LAST_VIRTUAL_REGISTER)
2075 temp = XEXP (orig, 0);
2076 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2077 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2078 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2079 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2080 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2082 /* Objects may initially be represented as registers, but
2083 may be turned into a MEM if their address is taken by
2084 put_var_into_stack. Therefore, the register table may have
2085 entries which are MEMs.
2087 We briefly tried to clear such entries, but that ended up
2088 cascading into many changes due to the optimizers not being
2089 prepared for empty entries in the register table. So we've
2090 decided to allow the MEMs in the register table for now. */
2091 if (REG_P (map->x_regno_reg_rtx[regno])
2092 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2093 mark_reg_pointer (map->reg_map[regno],
2094 map->regno_pointer_align[regno]);
2095 regno = REGNO (map->reg_map[regno]);
2097 ADDRESSOF_REGNO (copy) = regno;
2098 return copy;
2100 case USE:
2101 case CLOBBER:
2102 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2103 to (use foo) if the original insn didn't have a subreg.
2104 Removing the subreg distorts the VAX movstrhi pattern
2105 by changing the mode of an operand. */
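/* Illustration (register numbers hypothetical): if the original insn
   was (use (reg:SI 60)) and the substitution below yields
   (subreg:SI (reg:DI 70) 0), the SUBREG is stripped and we emit
   (use (reg:DI 70)) instead. */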
2106 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2107 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2108 copy = SUBREG_REG (copy);
2109 return gen_rtx_fmt_e (code, VOIDmode, copy);
2111 /* We need to handle "deleted" labels that appear in the DECL_RTL
2112 of a LABEL_DECL. */
2113 case NOTE:
2114 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2115 break;
2117 /* ... FALLTHRU ... */
2118 case CODE_LABEL:
2119 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2120 = LABEL_PRESERVE_P (orig);
2121 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2123 case LABEL_REF:
2124 copy
2125 = gen_rtx_LABEL_REF
2126 (mode,
2127 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2128 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2130 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2132 /* The fact that this label was previously nonlocal does not mean
2133 it still is, so we must check if it is within the range of
2134 this function's labels. */
2135 LABEL_REF_NONLOCAL_P (copy)
2136 = (LABEL_REF_NONLOCAL_P (orig)
2137 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2138 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2140 /* If we have made a nonlocal label local, it means that this
2141 inlined call will be referring to our nonlocal goto handler.
2142 So make sure we create one for this block; we normally would
2143 not since this is not otherwise considered a "call". */
2144 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2145 function_call_count++;
2147 return copy;
2149 case PC:
2150 case CC0:
2151 case CONST_INT:
2152 case CONST_VECTOR:
2153 return orig;
2155 case SYMBOL_REF:
2156 /* Symbols which represent the address of a label stored in the constant
2157 pool must be modified to point to a constant pool entry for the
2158 remapped label. Otherwise, symbols are returned unchanged. */
2159 if (CONSTANT_POOL_ADDRESS_P (orig))
2161 struct function *f = inlining ? inlining : cfun;
2162 rtx constant = get_pool_constant_for_function (f, orig);
2163 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2164 if (inlining)
2166 rtx temp = force_const_mem (const_mode,
2167 copy_rtx_and_substitute (constant,
2168 map, 0));
2170 #if 0
2171 /* Legitimizing the address here is incorrect.
2173 Since we had a SYMBOL_REF before, we can assume it is valid
2174 to have one in this position in the insn.
2176 Also, change_address may create new registers. These
2177 registers will not have valid reg_map entries. This can
2178 cause try_constants() to fail because it assumes that all
2179 registers in the rtx have valid reg_map entries, and it may
2180 end up replacing one of these new registers with junk. */
2182 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2183 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2184 #endif
2186 temp = XEXP (temp, 0);
2188 #ifdef POINTERS_EXTEND_UNSIGNED
2189 if (GET_MODE (temp) != GET_MODE (orig))
2190 temp = convert_memory_address (GET_MODE (orig), temp);
2191 #endif
2192 return temp;
2194 else if (GET_CODE (constant) == LABEL_REF)
2195 return XEXP (force_const_mem
2196 (GET_MODE (orig),
2197 copy_rtx_and_substitute (constant, map, for_lhs)),
2198 0);
2201 return orig;
2203 case CONST_DOUBLE:
2204 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2205 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2206 duplicate of a CONST_DOUBLE we have already seen. */
2207 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2209 REAL_VALUE_TYPE d;
2211 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2212 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2214 else
2215 return immed_double_const (CONST_DOUBLE_LOW (orig),
2216 CONST_DOUBLE_HIGH (orig), VOIDmode);
2218 case CONST:
2219 /* Make a new constant pool entry for a constant
2220 that was in the pool of the inline function. */
2221 if (RTX_INTEGRATED_P (orig))
2222 abort ();
2223 break;
2225 case ASM_OPERANDS:
2226 /* If a single asm insn contains multiple output operands then
2227 it contains multiple ASM_OPERANDS rtx's that share the input
2228 and constraint vecs. We must make sure that the copied insn
2229 continues to share them. */
2230 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2232 copy = rtx_alloc (ASM_OPERANDS);
2233 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2234 PUT_MODE (copy, GET_MODE (orig));
2235 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2236 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2237 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2238 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2239 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2240 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2241 = map->copy_asm_constraints_vector;
2242 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2243 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2244 return copy;
2246 break;
2248 case CALL:
2249 /* This is given special treatment because the first
2250 operand of a CALL is a (MEM ...) which may get
2251 forced into a register for cse. This is undesirable
2252 if function-address cse isn't wanted or if we won't do cse. */
2253 #ifndef NO_FUNCTION_CSE
2254 if (! (optimize && ! flag_no_function_cse))
2255 #endif
2257 rtx copy
2258 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2259 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2260 map, 0));
2262 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2264 return
2265 gen_rtx_CALL (GET_MODE (orig), copy,
2266 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2268 break;
2270 #if 0
2271 /* Must be ifdefed out for loop unrolling to work. */
2272 case RETURN:
2273 abort ();
2274 #endif
2276 case SET:
2277 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2278 Adjust the setting by the offset of the area we made.
2279 If the nonlocal goto is into the current function,
2280 this will result in unnecessarily bad code, but should work. */
2281 if (SET_DEST (orig) == virtual_stack_vars_rtx
2282 || SET_DEST (orig) == virtual_incoming_args_rtx)
2284 /* In case a translation hasn't occurred already, make one now. */
2285 rtx equiv_reg;
2286 rtx equiv_loc;
2287 HOST_WIDE_INT loc_offset;
2289 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2290 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2291 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2292 REGNO (equiv_reg)).rtx;
2293 loc_offset
2294 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2296 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2297 force_operand
2298 (plus_constant
2299 (copy_rtx_and_substitute (SET_SRC (orig),
2300 map, 0),
2301 - loc_offset),
2302 NULL_RTX));
2304 else
2305 return gen_rtx_SET (VOIDmode,
2306 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2307 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2308 break;
2310 case MEM:
2311 if (inlining
2312 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2313 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2315 enum machine_mode const_mode
2316 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2317 rtx constant
2318 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2320 constant = copy_rtx_and_substitute (constant, map, 0);
2322 /* If this was an address of a constant pool entry that itself
2323 had to be placed in the constant pool, it might not be a
2324 valid address. So the recursive call might have turned it
2325 into a register. In that case, it isn't a constant any
2326 more, so return it. This has the potential of changing a
2327 MEM into a REG, but we'll assume that it is safe. */
2328 if (! CONSTANT_P (constant))
2329 return constant;
2331 return validize_mem (force_const_mem (const_mode, constant));
2334 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2335 map, 0));
2336 MEM_COPY_ATTRIBUTES (copy, orig);
2338 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2339 since this may be an indirect reference to a parameter and the
2340 actual may not be readonly. */
2341 if (inlining && !for_lhs)
2342 RTX_UNCHANGING_P (copy) = 0;
2344 /* If inlining, squish aliasing data that references the subroutine's
2345 parameter list, since that's no longer applicable. */
2346 if (inlining && MEM_EXPR (copy)
2347 && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2348 && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2349 set_mem_expr (copy, NULL_TREE);
2351 return copy;
2353 default:
2354 break;
2357 copy = rtx_alloc (code);
2358 PUT_MODE (copy, mode);
2359 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2360 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2361 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2363 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2365 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2367 switch (*format_ptr++)
2369 case '0':
2370 /* Copy this through the wide int field; that's safest. */
2371 X0WINT (copy, i) = X0WINT (orig, i);
2372 break;
2374 case 'e':
2375 XEXP (copy, i)
2376 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2377 break;
2379 case 'u':
2380 /* Change any references to old-insns to point to the
2381 corresponding copied insns. */
2382 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2383 break;
2385 case 'E':
2386 XVEC (copy, i) = XVEC (orig, i);
2387 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2389 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2390 for (j = 0; j < XVECLEN (copy, i); j++)
2391 XVECEXP (copy, i, j)
2392 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2393 map, for_lhs);
2395 break;
2397 case 'w':
2398 XWINT (copy, i) = XWINT (orig, i);
2399 break;
2401 case 'i':
2402 XINT (copy, i) = XINT (orig, i);
2403 break;
2405 case 's':
2406 XSTR (copy, i) = XSTR (orig, i);
2407 break;
2409 case 't':
2410 XTREE (copy, i) = XTREE (orig, i);
2411 break;
2413 default:
2414 abort ();
2418 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2420 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2421 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2422 map->copy_asm_constraints_vector
2423 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2426 return copy;
2429 /* Substitute known constant values into INSN, if that is valid. */
2431 void
2432 try_constants (insn, map)
2433 rtx insn;
2434 struct inline_remap *map;
2436 int i;
2438 map->num_sets = 0;
2440 /* First try just updating addresses, then other things. This is
2441 important when we have something like the store of a constant
2442 into memory and we can update the memory address but the machine
2443 does not support a constant source. */
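/* A sketch with hypothetical operands: for
   (set (mem (reg 60)) (reg 61)), where both pseudos have known
   constant equivalents, the first pass may fold only the MEM address,
   and the second pass also tries the source; each group of changes is
   kept by apply_change_group only if the insn still matches. */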
2444 subst_constants (&PATTERN (insn), insn, map, 1);
2445 apply_change_group ();
2446 subst_constants (&PATTERN (insn), insn, map, 0);
2447 apply_change_group ();
2449 /* Show we don't know the value of anything stored or clobbered. */
2450 note_stores (PATTERN (insn), mark_stores, NULL);
2451 map->last_pc_value = 0;
2452 #ifdef HAVE_cc0
2453 map->last_cc0_value = 0;
2454 #endif
2456 /* Set up any constant equivalences made in this insn. */
2457 for (i = 0; i < map->num_sets; i++)
2459 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2461 int regno = REGNO (map->equiv_sets[i].dest);
2463 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2464 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2465 /* The following clause is a hack to make the case work where GNU C++
2466 reassigns a variable to make cse work right. */
2467 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2468 regno).rtx,
2469 map->equiv_sets[i].equiv))
2470 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2471 map->equiv_sets[i].equiv, map->const_age);
2473 else if (map->equiv_sets[i].dest == pc_rtx)
2474 map->last_pc_value = map->equiv_sets[i].equiv;
2475 #ifdef HAVE_cc0
2476 else if (map->equiv_sets[i].dest == cc0_rtx)
2477 map->last_cc0_value = map->equiv_sets[i].equiv;
2478 #endif
2482 /* Substitute known constants for pseudo regs in the contents of LOC,
2483 which are part of INSN.
2484 If INSN is zero, the substitution should always be done (this is used to
2485 update DECL_RTL).
2486 These changes are taken out by try_constants if the result is not valid.
2488 Note that we are more concerned with determining when the result of a SET
2489 is a constant, for further propagation, than actually inserting constants
2490 into insns; cse will do the latter task better.
2492 This function is also used to adjust the address of items previously addressed
2493 via the virtual stack variable or virtual incoming arguments registers.
2495 If MEMONLY is nonzero, only make changes inside a MEM. */
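/* A small illustration (pseudo-register numbers hypothetical): with
   reg 60 recorded as equivalent to (const_int 4) and reg 61 to
   (const_int 3), (plus:SI (reg:SI 60) (reg:SI 61)) has each operand
   substituted below and is then folded by simplify_binary_operation
   into (const_int 7). */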
2497 static void
2498 subst_constants (loc, insn, map, memonly)
2499 rtx *loc;
2500 rtx insn;
2501 struct inline_remap *map;
2502 int memonly;
2504 rtx x = *loc;
2505 int i, j;
2506 enum rtx_code code;
2507 const char *format_ptr;
2508 int num_changes = num_validated_changes ();
2509 rtx new = 0;
2510 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2512 code = GET_CODE (x);
2514 switch (code)
2516 case PC:
2517 case CONST_INT:
2518 case CONST_DOUBLE:
2519 case CONST_VECTOR:
2520 case SYMBOL_REF:
2521 case CONST:
2522 case LABEL_REF:
2523 case ADDRESS:
2524 return;
2526 #ifdef HAVE_cc0
2527 case CC0:
2528 if (! memonly)
2529 validate_change (insn, loc, map->last_cc0_value, 1);
2530 return;
2531 #endif
2533 case USE:
2534 case CLOBBER:
2535 /* The only thing we can do with a USE or CLOBBER is possibly do
2536 some substitutions in a MEM within it. */
2537 if (GET_CODE (XEXP (x, 0)) == MEM)
2538 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2539 return;
2541 case REG:
2542 /* Substitute for parms and known constants. Don't replace
2543 hard regs used as user variables with constants. */
2544 if (! memonly)
2546 int regno = REGNO (x);
2547 struct const_equiv_data *p;
2549 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2550 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2551 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2552 p->rtx != 0)
2553 && p->age >= map->const_age)
2554 validate_change (insn, loc, p->rtx, 1);
2556 return;
2558 case SUBREG:
2559 /* SUBREG applied to something other than a reg
2560 should be treated as ordinary, since that must
2561 be a special hack and we don't know how to treat it specially.
2562 Consider for example mulsidi3 in m68k.md.
2563 Ordinary SUBREG of a REG needs this special treatment. */
2564 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2566 rtx inner = SUBREG_REG (x);
2567 rtx new = 0;
2569 /* We can't call subst_constants on &SUBREG_REG (x) because any
2570 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2571 see what is inside, try to form the new SUBREG and see if that is
2572 valid. We handle two cases: extracting a full word in an
2573 integral mode and extracting the low part. */
2574 subst_constants (&inner, NULL_RTX, map, 0);
2575 new = simplify_gen_subreg (GET_MODE (x), inner,
2576 GET_MODE (SUBREG_REG (x)),
2577 SUBREG_BYTE (x));
2579 if (new)
2580 validate_change (insn, loc, new, 1);
2581 else
2582 cancel_changes (num_changes);
2584 return;
2586 break;
2588 case MEM:
2589 subst_constants (&XEXP (x, 0), insn, map, 0);
2591 /* If a memory address got spoiled, change it back. */
2592 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2593 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2594 cancel_changes (num_changes);
2595 return;
2597 case SET:
2599 /* Substitute constants in our source, and in any arguments to a
2600 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2601 itself. */
2602 rtx *dest_loc = &SET_DEST (x);
2603 rtx dest = *dest_loc;
2604 rtx src, tem;
2605 enum machine_mode compare_mode = VOIDmode;
2607 /* If SET_SRC is a COMPARE which subst_constants would turn into
2608 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2609 is to be done. */
2610 if (GET_CODE (SET_SRC (x)) == COMPARE)
2612 src = SET_SRC (x);
2613 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2614 #ifdef HAVE_cc0
2615 || dest == cc0_rtx
2616 #endif
2619 compare_mode = GET_MODE (XEXP (src, 0));
2620 if (compare_mode == VOIDmode)
2621 compare_mode = GET_MODE (XEXP (src, 1));
2625 subst_constants (&SET_SRC (x), insn, map, memonly);
2626 src = SET_SRC (x);
2628 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2629 || GET_CODE (*dest_loc) == SUBREG
2630 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2632 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2634 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2635 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2637 dest_loc = &XEXP (*dest_loc, 0);
2640 /* Do substitution in the address of a destination in memory. */
2641 if (GET_CODE (*dest_loc) == MEM)
2642 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2644 /* Check for the case where DEST is a SUBREG, both it and the underlying
2645 register are no wider than one word, and the SUBREG has the wider mode.
2646 In that case, we are really setting the underlying register to the
2647 source converted to the mode of DEST. So indicate that. */
2648 if (GET_CODE (dest) == SUBREG
2649 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2650 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2651 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2652 <= GET_MODE_SIZE (GET_MODE (dest)))
2653 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2654 src)))
2655 src = tem, dest = SUBREG_REG (dest);
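/* E.g. (assuming a 32-bit word and hypothetical register numbers):
   for (set (subreg:SI (reg:QI 100) 0) (const_int 300)) we record an
   equivalence for (reg:QI 100) itself, with the source narrowed by
   gen_lowpart_if_possible to (const_int 44), i.e. 300 & 0xff. */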
2657 /* If storing a recognizable value, save it for later recording. */
2658 if ((map->num_sets < MAX_RECOG_OPERANDS)
2659 && (CONSTANT_P (src)
2660 || (GET_CODE (src) == REG
2661 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2662 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2663 || (GET_CODE (src) == PLUS
2664 && GET_CODE (XEXP (src, 0)) == REG
2665 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2666 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2667 && CONSTANT_P (XEXP (src, 1)))
2668 || GET_CODE (src) == COMPARE
2669 #ifdef HAVE_cc0
2670 || dest == cc0_rtx
2671 #endif
2672 || (dest == pc_rtx
2673 && (src == pc_rtx || GET_CODE (src) == RETURN
2674 || GET_CODE (src) == LABEL_REF))))
2676 /* Normally, this copy won't do anything. But if SRC is a COMPARE,
2677 it will cause us to save the COMPARE with any constants
2678 substituted, which is what we want for later. */
2679 rtx src_copy = copy_rtx (src);
2680 map->equiv_sets[map->num_sets].equiv = src_copy;
2681 map->equiv_sets[map->num_sets++].dest = dest;
2682 if (compare_mode != VOIDmode
2683 && GET_CODE (src) == COMPARE
2684 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2685 #ifdef HAVE_cc0
2686 || dest == cc0_rtx
2687 #endif
2689 && GET_MODE (XEXP (src, 0)) == VOIDmode
2690 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2692 map->compare_src = src_copy;
2693 map->compare_mode = compare_mode;
2697 return;
2699 default:
2700 break;
2703 format_ptr = GET_RTX_FORMAT (code);
2705 /* If the first operand is an expression, save its mode for later. */
2706 if (*format_ptr == 'e')
2707 op0_mode = GET_MODE (XEXP (x, 0));
2709 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2711 switch (*format_ptr++)
2713 case '0':
2714 break;
2716 case 'e':
2717 if (XEXP (x, i))
2718 subst_constants (&XEXP (x, i), insn, map, memonly);
2719 break;
2721 case 'u':
2722 case 'i':
2723 case 's':
2724 case 'w':
2725 case 'n':
2726 case 't':
2727 case 'B':
2728 break;
2730 case 'E':
2731 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2732 for (j = 0; j < XVECLEN (x, i); j++)
2733 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2735 break;
2737 default:
2738 abort ();
2742 /* If this is a commutative operation, move a constant to the second
2743 operand unless the second operand is already a CONST_INT. */
2744 if (! memonly
2745 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2746 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2748 rtx tem = XEXP (x, 0);
2749 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2750 validate_change (insn, &XEXP (x, 1), tem, 1);
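/* For instance, (plus:SI (const_int 4) (reg:SI 60)) becomes
   (plus:SI (reg:SI 60) (const_int 4)), the canonical order. */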
2753 /* Simplify the expression in case we put in some constants. */
2754 if (! memonly)
2755 switch (GET_RTX_CLASS (code))
2757 case '1':
2758 if (op0_mode == MAX_MACHINE_MODE)
2759 abort ();
2760 new = simplify_unary_operation (code, GET_MODE (x),
2761 XEXP (x, 0), op0_mode);
2762 break;
2764 case '<':
2766 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2768 if (op_mode == VOIDmode)
2769 op_mode = GET_MODE (XEXP (x, 1));
2770 new = simplify_relational_operation (code, op_mode,
2771 XEXP (x, 0), XEXP (x, 1));
2772 #ifdef FLOAT_STORE_FLAG_VALUE
2773 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2775 enum machine_mode mode = GET_MODE (x);
2776 if (new == const0_rtx)
2777 new = CONST0_RTX (mode);
2778 else
2780 REAL_VALUE_TYPE val;
2782 /* Avoid automatic aggregate initialization. */
2783 val = FLOAT_STORE_FLAG_VALUE (mode);
2784 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2787 #endif
2788 break;
2791 case '2':
2792 case 'c':
2793 new = simplify_binary_operation (code, GET_MODE (x),
2794 XEXP (x, 0), XEXP (x, 1));
2795 break;
2797 case 'b':
2798 case '3':
2799 if (op0_mode == MAX_MACHINE_MODE)
2800 abort ();
2802 if (code == IF_THEN_ELSE)
2804 rtx op0 = XEXP (x, 0);
2806 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2807 && GET_MODE (op0) == VOIDmode
2808 && ! side_effects_p (op0)
2809 && XEXP (op0, 0) == map->compare_src
2810 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2812 /* We have a compare of two VOIDmode constants for which
2813 we recorded the comparison mode. */
2814 rtx temp =
2815 simplify_relational_operation (GET_CODE (op0),
2816 map->compare_mode,
2817 XEXP (op0, 0),
2818 XEXP (op0, 1));
2820 if (temp == const0_rtx)
2821 new = XEXP (x, 2);
2822 else if (temp == const1_rtx)
2823 new = XEXP (x, 1);
2826 if (!new)
2827 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2828 XEXP (x, 0), XEXP (x, 1),
2829 XEXP (x, 2));
2830 break;
2833 if (new)
2834 validate_change (insn, loc, new, 1);
2837 /* Show that registers modified no longer contain known constants. We are
2838 called from note_stores with parts of the new insn. */
2840 static void
2841 mark_stores (dest, x, data)
2842 rtx dest;
2843 rtx x ATTRIBUTE_UNUSED;
2844 void *data ATTRIBUTE_UNUSED;
2846 int regno = -1;
2847 enum machine_mode mode = VOIDmode;
2849 /* DEST is always the innermost thing set, except in the case of
2850 SUBREGs of hard registers. */
2852 if (GET_CODE (dest) == REG)
2853 regno = REGNO (dest), mode = GET_MODE (dest);
2854 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2856 regno = REGNO (SUBREG_REG (dest));
2857 if (regno < FIRST_PSEUDO_REGISTER)
2858 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2859 GET_MODE (SUBREG_REG (dest)),
2860 SUBREG_BYTE (dest),
2861 GET_MODE (dest));
2862 mode = GET_MODE (SUBREG_REG (dest));
2865 if (regno >= 0)
2867 unsigned int uregno = regno;
2868 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2869 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2870 unsigned int i;
2872 /* Ignore virtual stack var or virtual arg register since those
2873 are handled separately. */
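/* For example, on a target where HARD_REGNO_NREGS (2, DImode) is 2,
   a store to (reg:DI 2) clears the recorded equivalences for hard
   registers 2 and 3. */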
2874 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2875 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2876 for (i = uregno; i <= last_reg; i++)
2877 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2878 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2882 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2883 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2884 that it points to the node itself, thus indicating that the node is its
2885 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2886 the given node is NULL, recursively descend the decl/block tree of
2887 which it is the root, and for each other ..._DECL or BLOCK node contained
2888 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2889 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2890 values to point to themselves. */
2892 static void
2893 set_block_origin_self (stmt)
2894 tree stmt;
2896 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2898 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2901 tree local_decl;
2903 for (local_decl = BLOCK_VARS (stmt);
2904 local_decl != NULL_TREE;
2905 local_decl = TREE_CHAIN (local_decl))
2906 set_decl_origin_self (local_decl); /* Potential recursion. */
2910 tree subblock;
2912 for (subblock = BLOCK_SUBBLOCKS (stmt);
2913 subblock != NULL_TREE;
2914 subblock = BLOCK_CHAIN (subblock))
2915 set_block_origin_self (subblock); /* Recurse. */
2920 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2921 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2922 node so that it points to the node itself, thus indicating that the
2923 node represents its own (abstract) origin. Additionally, if the
2924 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2925 the decl/block tree of which the given node is the root, and for
2926 each other ..._DECL or BLOCK node contained therein whose
2927 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2928 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2929 point to themselves. */
2931 void
2932 set_decl_origin_self (decl)
2933 tree decl;
2935 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2937 DECL_ABSTRACT_ORIGIN (decl) = decl;
2938 if (TREE_CODE (decl) == FUNCTION_DECL)
2940 tree arg;
2942 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2943 DECL_ABSTRACT_ORIGIN (arg) = arg;
2944 if (DECL_INITIAL (decl) != NULL_TREE
2945 && DECL_INITIAL (decl) != error_mark_node)
2946 set_block_origin_self (DECL_INITIAL (decl));
2951 /* Given a pointer to some BLOCK node, and a boolean value to set the
2952 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2953 the given block, and for all local decls and all local sub-blocks
2954 (recursively) which are contained therein. */
2956 static void
2957 set_block_abstract_flags (stmt, setting)
2958 tree stmt;
2959 int setting;
2961 tree local_decl;
2962 tree subblock;
2964 BLOCK_ABSTRACT (stmt) = setting;
2966 for (local_decl = BLOCK_VARS (stmt);
2967 local_decl != NULL_TREE;
2968 local_decl = TREE_CHAIN (local_decl))
2969 set_decl_abstract_flags (local_decl, setting);
2971 for (subblock = BLOCK_SUBBLOCKS (stmt);
2972 subblock != NULL_TREE;
2973 subblock = BLOCK_CHAIN (subblock))
2974 set_block_abstract_flags (subblock, setting);
2977 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2978 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2979 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2980 set the abstract flags for all of the parameters, local vars, local
2981 blocks and sub-blocks (recursively) to the same setting. */
2983 void
2984 set_decl_abstract_flags (decl, setting)
2985 tree decl;
2986 int setting;
2988 DECL_ABSTRACT (decl) = setting;
2989 if (TREE_CODE (decl) == FUNCTION_DECL)
2991 tree arg;
2993 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2994 DECL_ABSTRACT (arg) = setting;
2995 if (DECL_INITIAL (decl) != NULL_TREE
2996 && DECL_INITIAL (decl) != error_mark_node)
2997 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3001 /* Output the assembly language code for the function FNDECL
3002 from its DECL_SAVED_INSNS. Used for inline functions that are output
3003 at the end of compilation instead of where they appeared in the source. */
3005 void
3006 output_inline_function (fndecl)
3007 tree fndecl;
3009 struct function *old_cfun = cfun;
3010 enum debug_info_type old_write_symbols = write_symbols;
3011 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
3012 struct function *f = DECL_SAVED_INSNS (fndecl);
3014 cfun = f;
3015 current_function_decl = fndecl;
3017 set_new_last_label_num (f->inl_max_label_num);
3019 /* We're not deferring this any longer. */
3020 DECL_DEFER_OUTPUT (fndecl) = 0;
3022 /* If requested, suppress debugging information. */
3023 if (f->no_debugging_symbols)
3025 write_symbols = NO_DEBUG;
3026 debug_hooks = &do_nothing_debug_hooks;
3029 /* Compile this function all the way down to assembly code. As a
3030 side effect this destroys the saved RTL representation, but
3031 that's okay, because we don't need to inline this anymore. */
3032 rest_of_compilation (fndecl);
3033 DECL_INLINE (fndecl) = 0;
3035 cfun = old_cfun;
3036 current_function_decl = old_cfun ? old_cfun->decl : 0;
3037 write_symbols = old_write_symbols;
3038 debug_hooks = old_debug_hooks;
3042 /* Functions to keep track of the values hard regs had at the start of
3043 the function. */
3045 rtx
3046 get_hard_reg_initial_reg (fun, reg)
3047 struct function *fun;
3048 rtx reg;
3050 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3051 int i;
3053 if (ivs == 0)
3054 return NULL_RTX;
3056 for (i = 0; i < ivs->num_entries; i++)
3057 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3058 return ivs->entries[i].hard_reg;
3060 return NULL_RTX;
3063 rtx
3064 has_func_hard_reg_initial_val (fun, reg)
3065 struct function *fun;
3066 rtx reg;
3068 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3069 int i;
3071 if (ivs == 0)
3072 return NULL_RTX;
3074 for (i = 0; i < ivs->num_entries; i++)
3075 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3076 return ivs->entries[i].pseudo;
3078 return NULL_RTX;
3081 rtx
3082 get_func_hard_reg_initial_val (fun, reg)
3083 struct function *fun;
3084 rtx reg;
3086 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3087 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3089 if (rv)
3090 return rv;
3092 if (ivs == 0)
3094 fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
3095 ivs = fun->hard_reg_initial_vals;
3096 ivs->num_entries = 0;
3097 ivs->max_entries = 5;
3098 ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
3101 if (ivs->num_entries >= ivs->max_entries)
3103 ivs->max_entries += 5;
3104 ivs->entries =
3105 (initial_value_pair *) ggc_realloc (ivs->entries,
3106 ivs->max_entries
3107 * sizeof (initial_value_pair));
3110 ivs->entries[ivs->num_entries].hard_reg = reg;
3111 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3113 return ivs->entries[ivs->num_entries++].pseudo;
3116 rtx
3117 get_hard_reg_initial_val (mode, regno)
3118 enum machine_mode mode;
3119 int regno;
3121 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
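/* A sketch of typical use (the register number is hypothetical):

   rtx entry_sp = get_hard_reg_initial_val (Pmode, 14);

   returns a pseudo that emit_initial_value_sets, below, initializes
   from hard register 14 at the very start of the function, so later
   references see the register's value on entry. */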
3124 rtx
3125 has_hard_reg_initial_val (mode, regno)
3126 enum machine_mode mode;
3127 int regno;
3129 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3132 static void
3133 setup_initial_hard_reg_value_integration (inl_f, remap)
3134 struct function *inl_f;
3135 struct inline_remap *remap;
3137 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3138 int i;
3140 if (ivs == 0)
3141 return;
3143 for (i = 0; i < ivs->num_entries; i ++)
3144 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3145 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3149 void
3150 emit_initial_value_sets ()
3152 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3153 int i;
3154 rtx seq;
3156 if (ivs == 0)
3157 return;
3159 start_sequence ();
3160 for (i = 0; i < ivs->num_entries; i++)
3161 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3162 seq = get_insns ();
3163 end_sequence ();
3165 emit_insn_after (seq, get_insns ());
3168 /* If the backend knows where to allocate pseudos for hard
3169 register initial values, register these allocations now. */
3170 void
3171 allocate_initial_values (reg_equiv_memory_loc)
3172 rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3174 #ifdef ALLOCATE_INITIAL_VALUE
3175 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3176 int i;
3178 if (ivs == 0)
3179 return;
3181 for (i = 0; i < ivs->num_entries; i++)
3183 int regno = REGNO (ivs->entries[i].pseudo);
3184 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3186 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3187 ; /* Do nothing. */
3188 else if (GET_CODE (x) == MEM)
3189 reg_equiv_memory_loc[regno] = x;
3190 else if (GET_CODE (x) == REG)
3192 reg_renumber[regno] = REGNO (x);
3193 /* Poke the regno right into regno_reg_rtx
3194 so that even fixed regs are accepted. */
3195 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3197 else abort ();
3199 #endif
3202 #include "gt-integrate.h"