1 /* Procedure integration for GCC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tm_p.h"
31 #include "regs.h"
32 #include "flags.h"
33 #include "debug.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "output.h"
37 #include "recog.h"
38 #include "integrate.h"
39 #include "real.h"
40 #include "except.h"
41 #include "function.h"
42 #include "toplev.h"
43 #include "intl.h"
44 #include "loop.h"
45 #include "params.h"
46 #include "ggc.h"
47 #include "target.h"
48 #include "langhooks.h"
 50 /* Round VALUE up to the next highest integer that meets the
 51 alignment. */
52 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
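/* For example, CEIL_ROUND (13, 8) is (13 + 7) & ~7 = 16, while an already
   aligned value is unchanged: CEIL_ROUND (16, 8) = 16.  ALIGN must be a
   power of 2 for the mask trick above to work.  */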
54 /* Default max number of insns a function can have and still be inline.
55 This is overridden on RISC machines. */
56 #ifndef INTEGRATE_THRESHOLD
 57 /* Inlining small functions might save more space than not inlining at
58 all. Assume 1 instruction for the call and 1.5 insns per argument. */
59 #define INTEGRATE_THRESHOLD(DECL) \
60 (optimize_size \
61 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
62 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
63 #endif
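/* Illustration of the thresholds above: a function with 3 arguments may have
   at most 1 + (3 * 3) / 2 = 5 insns when optimizing for size, and
   8 * (8 + 3) = 88 insns otherwise.  */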
66 /* Private type used by {get/has}_func_hard_reg_initial_val. */
67 typedef struct initial_value_pair GTY(()) {
68 rtx hard_reg;
69 rtx pseudo;
70 } initial_value_pair;
71 typedef struct initial_value_struct GTY(()) {
72 int num_entries;
73 int max_entries;
74 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
75 } initial_value_struct;
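/* NUM_ENTRIES is the number of (hard_reg, pseudo) pairs currently stored in
   ENTRIES (the GTY length annotation above walks exactly that many), and
   MAX_ENTRIES records the allocated size of the array.  */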
77 static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));
79 static rtvec initialize_for_inline PARAMS ((tree));
80 static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
81 static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
82 rtvec));
83 static tree integrate_decl_tree PARAMS ((tree,
84 struct inline_remap *));
85 static void subst_constants PARAMS ((rtx *, rtx,
86 struct inline_remap *, int));
87 static void set_block_origin_self PARAMS ((tree));
88 static void set_block_abstract_flags PARAMS ((tree, int));
89 static void process_reg_param PARAMS ((struct inline_remap *, rtx,
90 rtx));
91 void set_decl_abstract_flags PARAMS ((tree, int));
92 static void mark_stores PARAMS ((rtx, rtx, void *));
93 static void save_parm_insns PARAMS ((rtx, rtx));
94 static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
95 rtx));
96 static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
97 int));
98 static int compare_blocks PARAMS ((const PTR, const PTR));
99 static int find_block PARAMS ((const PTR, const PTR));
101 /* Used by copy_rtx_and_substitute; this indicates whether the function is
 102 called for the purpose of inlining or some other purpose (e.g. loop
 103 unrolling). This affects how constant pool references are handled.
 104 This variable points to the struct function of the function being inlined. */
105 static struct function *inlining = 0;
107 /* Returns the Ith entry in the label_map contained in MAP. If the
108 Ith entry has not yet been set, return a fresh label. This function
109 performs a lazy initialization of label_map, thereby avoiding huge memory
110 explosions when the label_map gets very large. */
113 get_label_from_map (map, i)
114 struct inline_remap *map;
115 int i;
117 rtx x = map->label_map[i];
119 if (x == NULL_RTX)
120 x = map->label_map[i] = gen_label_rtx ();
122 return x;
125 /* Return false if the function FNDECL cannot be inlined on account of its
126 attributes, true otherwise. */
127 bool
128 function_attribute_inlinable_p (fndecl)
129 tree fndecl;
131 if (targetm.attribute_table)
133 tree a;
135 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
137 tree name = TREE_PURPOSE (a);
138 int i;
140 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
141 if (is_attribute_p (targetm.attribute_table[i].name, name))
142 return (*targetm.function_attribute_inlinable_p) (fndecl);
146 return true;
149 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
150 is safe and reasonable to integrate into other functions.
151 Nonzero means value is a warning msgid with a single %s
152 for the function's name. */
154 const char *
155 function_cannot_inline_p (fndecl)
156 tree fndecl;
158 rtx insn;
159 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
 161 /* For functions marked as inline, increase the maximum size to
162 MAX_INLINE_INSNS (-finline-limit-<n>). For regular functions
163 use the limit given by INTEGRATE_THRESHOLD. */
165 int max_insns = (DECL_INLINE (fndecl))
166 ? (MAX_INLINE_INSNS
167 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
168 : INTEGRATE_THRESHOLD (fndecl);
170 int ninsns = 0;
171 tree parms;
173 if (DECL_UNINLINABLE (fndecl))
174 return N_("function cannot be inline");
176 /* No inlines with varargs. */
177 if (last && TREE_VALUE (last) != void_type_node)
178 return N_("varargs function cannot be inline");
180 if (current_function_calls_alloca)
181 return N_("function using alloca cannot be inline");
183 if (current_function_calls_setjmp)
184 return N_("function using setjmp cannot be inline");
186 if (current_function_calls_eh_return)
187 return N_("function uses __builtin_eh_return");
189 if (current_function_contains_functions)
190 return N_("function with nested functions cannot be inline");
192 if (forced_labels)
193 return
194 N_("function with label addresses used in initializers cannot inline");
196 if (current_function_cannot_inline)
197 return current_function_cannot_inline;
 199 /* If it's not even close, don't even look. */
200 if (get_max_uid () > 3 * max_insns)
201 return N_("function too large to be inline");
203 #if 0
204 /* Don't inline functions which do not specify a function prototype and
205 have BLKmode argument or take the address of a parameter. */
206 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
208 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
209 TREE_ADDRESSABLE (parms) = 1;
210 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
211 return N_("no prototype, and parameter address used; cannot be inline");
213 #endif
215 /* We can't inline functions that return structures
216 the old-fashioned PCC way, copying into a static block. */
217 if (current_function_returns_pcc_struct)
218 return N_("inline functions not supported for this return value type");
220 /* We can't inline functions that return structures of varying size. */
221 if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
222 && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
223 return N_("function with varying-size return value cannot be inline");
225 /* Cannot inline a function with a varying size argument or one that
226 receives a transparent union. */
227 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
229 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
230 return N_("function with varying-size parameter cannot be inline");
231 else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
232 && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
 233 return N_("function with transparent union parameter cannot be inline");
236 if (get_max_uid () > max_insns)
238 for (ninsns = 0, insn = get_first_nonparm_insn ();
239 insn && ninsns < max_insns;
240 insn = NEXT_INSN (insn))
241 if (INSN_P (insn))
242 ninsns++;
244 if (ninsns >= max_insns)
245 return N_("function too large to be inline");
248 /* We will not inline a function which uses computed goto. The addresses of
249 its local labels, which may be tucked into global storage, are of course
250 not constant across instantiations, which causes unexpected behavior. */
251 if (current_function_has_computed_jump)
252 return N_("function with computed jump cannot inline");
254 /* We cannot inline a nested function that jumps to a nonlocal label. */
255 if (current_function_has_nonlocal_goto)
256 return N_("function with nonlocal goto cannot be inline");
258 /* We can't inline functions that return a PARALLEL rtx. */
259 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
261 rtx result = DECL_RTL (DECL_RESULT (fndecl));
262 if (GET_CODE (result) == PARALLEL)
263 return N_("inline functions not supported for this return value type");
266 /* If the function has a target specific attribute attached to it,
267 then we assume that we should not inline it. This can be overridden
268 by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P. */
269 if (!function_attribute_inlinable_p (fndecl))
270 return N_("function with target specific attribute(s) cannot be inlined");
272 return NULL;
275 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
276 Zero for a reg that isn't a parm's home.
277 Only reg numbers less than max_parm_reg are mapped here. */
278 static tree *parmdecl_map;
280 /* In save_for_inline, nonzero if past the parm-initialization insns. */
281 static int in_nonparm_insns;
283 /* Subroutine for `save_for_inline'. Performs initialization
284 needed to save FNDECL's insns and info for future inline expansion. */
286 static rtvec
287 initialize_for_inline (fndecl)
288 tree fndecl;
290 int i;
291 rtvec arg_vector;
292 tree parms;
294 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
295 memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
296 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
298 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
299 parms;
300 parms = TREE_CHAIN (parms), i++)
302 rtx p = DECL_RTL (parms);
304 /* If we have (mem (addressof (mem ...))), use the inner MEM since
305 otherwise the copy_rtx call below will not unshare the MEM since
306 it shares ADDRESSOF. */
307 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
308 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
309 p = XEXP (XEXP (p, 0), 0);
311 RTVEC_ELT (arg_vector, i) = p;
313 if (GET_CODE (p) == REG)
314 parmdecl_map[REGNO (p)] = parms;
315 else if (GET_CODE (p) == CONCAT)
317 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
318 rtx pimag = gen_imagpart (GET_MODE (preal), p);
320 if (GET_CODE (preal) == REG)
321 parmdecl_map[REGNO (preal)] = parms;
322 if (GET_CODE (pimag) == REG)
323 parmdecl_map[REGNO (pimag)] = parms;
326 /* This flag is cleared later
327 if the function ever modifies the value of the parm. */
328 TREE_READONLY (parms) = 1;
331 return arg_vector;
334 /* Copy NODE (which must be a DECL, but not a PARM_DECL). The DECL
335 originally was in the FROM_FN, but now it will be in the
336 TO_FN. */
338 tree
339 copy_decl_for_inlining (decl, from_fn, to_fn)
340 tree decl;
341 tree from_fn;
342 tree to_fn;
344 tree copy;
346 /* Copy the declaration. */
347 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
349 tree type;
350 int invisiref = 0;
352 /* See if the frontend wants to pass this by invisible reference. */
353 if (TREE_CODE (decl) == PARM_DECL
354 && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
355 && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
356 && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
358 invisiref = 1;
359 type = DECL_ARG_TYPE (decl);
361 else
362 type = TREE_TYPE (decl);
364 /* For a parameter, we must make an equivalent VAR_DECL, not a
365 new PARM_DECL. */
366 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
367 if (!invisiref)
369 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
370 TREE_READONLY (copy) = TREE_READONLY (decl);
371 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
373 else
375 TREE_ADDRESSABLE (copy) = 0;
376 TREE_READONLY (copy) = 1;
377 TREE_THIS_VOLATILE (copy) = 0;
380 else
382 copy = copy_node (decl);
383 (*lang_hooks.dup_lang_specific_decl) (copy);
385 /* TREE_ADDRESSABLE isn't used to indicate that a label's
386 address has been taken; it's for internal bookkeeping in
387 expand_goto_internal. */
388 if (TREE_CODE (copy) == LABEL_DECL)
389 TREE_ADDRESSABLE (copy) = 0;
392 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
393 declaration inspired this copy. */
394 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
396 /* The new variable/label has no RTL, yet. */
397 if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
398 SET_DECL_RTL (copy, NULL_RTX);
400 /* These args would always appear unused, if not for this. */
401 TREE_USED (copy) = 1;
403 /* Set the context for the new declaration. */
404 if (!DECL_CONTEXT (decl))
405 /* Globals stay global. */
407 else if (DECL_CONTEXT (decl) != from_fn)
408 /* Things that weren't in the scope of the function we're inlining
409 from aren't in the scope we're inlining to, either. */
411 else if (TREE_STATIC (decl))
412 /* Function-scoped static variables should stay in the original
413 function. */
415 else
416 /* Ordinary automatic local variables are now in the scope of the
417 new function. */
418 DECL_CONTEXT (copy) = to_fn;
420 return copy;
423 /* Make the insns and PARM_DECLs of the current function permanent
424 and record other information in DECL_SAVED_INSNS to allow inlining
425 of this function in subsequent calls.
427 This routine need not copy any insns because we are not going
428 to immediately compile the insns in the insn chain. There
429 are two cases when we would compile the insns for FNDECL:
430 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
431 be output at the end of other compilation, because somebody took
432 its address. In the first case, the insns of FNDECL are copied
433 as it is expanded inline, so FNDECL's saved insns are not
434 modified. In the second case, FNDECL is used for the last time,
435 so modifying the rtl is not a problem.
437 We don't have to worry about FNDECL being inline expanded by
438 other functions which are written at the end of compilation
439 because flag_no_inline is turned on when we begin writing
440 functions at the end of compilation. */
442 void
443 save_for_inline (fndecl)
444 tree fndecl;
446 rtx insn;
447 rtvec argvec;
448 rtx first_nonparm_insn;
450 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
451 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
452 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
453 for the parms, prior to elimination of virtual registers.
454 These values are needed for substituting parms properly. */
455 if (! flag_no_inline)
456 parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
458 /* Make and emit a return-label if we have not already done so. */
460 if (return_label == 0)
462 return_label = gen_label_rtx ();
463 emit_label (return_label);
466 if (! flag_no_inline)
467 argvec = initialize_for_inline (fndecl);
468 else
469 argvec = NULL;
 471 /* Delete basic block notes created by an early run of find_basic_blocks.
 472 The notes would later be used by find_basic_blocks to reuse the memory
 473 for basic_block structures on an already freed obstack. */
474 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
475 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
476 delete_related_insns (insn);
478 /* If there are insns that copy parms from the stack into pseudo registers,
479 those insns are not copied. `expand_inline_function' must
480 emit the correct code to handle such things. */
482 insn = get_insns ();
483 if (GET_CODE (insn) != NOTE)
484 abort ();
486 if (! flag_no_inline)
488 /* Get the insn which signals the end of parameter setup code. */
489 first_nonparm_insn = get_first_nonparm_insn ();
491 /* Now just scan the chain of insns to see what happens to our
492 PARM_DECLs. If a PARM_DECL is used but never modified, we
493 can substitute its rtl directly when expanding inline (and
494 perform constant folding when its incoming value is
495 constant). Otherwise, we have to copy its value into a new
496 register and track the new register's life. */
497 in_nonparm_insns = 0;
498 save_parm_insns (insn, first_nonparm_insn);
500 cfun->inl_max_label_num = max_label_num ();
501 cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
502 cfun->original_arg_vector = argvec;
504 cfun->original_decl_initial = DECL_INITIAL (fndecl);
505 cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
506 DECL_SAVED_INSNS (fndecl) = cfun;
508 /* Clean up. */
509 if (! flag_no_inline)
510 free (parmdecl_map);
513 /* Scan the chain of insns to see what happens to our PARM_DECLs. If a
514 PARM_DECL is used but never modified, we can substitute its rtl directly
515 when expanding inline (and perform constant folding when its incoming
516 value is constant). Otherwise, we have to copy its value into a new
517 register and track the new register's life. */
519 static void
520 save_parm_insns (insn, first_nonparm_insn)
521 rtx insn;
522 rtx first_nonparm_insn;
524 if (insn == NULL_RTX)
525 return;
527 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
529 if (insn == first_nonparm_insn)
530 in_nonparm_insns = 1;
532 if (INSN_P (insn))
534 /* Record what interesting things happen to our parameters. */
535 note_stores (PATTERN (insn), note_modified_parmregs, NULL);
537 /* If this is a CALL_PLACEHOLDER insn then we need to look into the
538 three attached sequences: normal call, sibling call and tail
539 recursion. */
540 if (GET_CODE (insn) == CALL_INSN
541 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
543 int i;
545 for (i = 0; i < 3; i++)
546 save_parm_insns (XEXP (PATTERN (insn), i),
547 first_nonparm_insn);
553 /* Note whether a parameter is modified or not. */
555 static void
556 note_modified_parmregs (reg, x, data)
557 rtx reg;
558 rtx x ATTRIBUTE_UNUSED;
559 void *data ATTRIBUTE_UNUSED;
561 if (GET_CODE (reg) == REG && in_nonparm_insns
562 && REGNO (reg) < max_parm_reg
563 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
564 && parmdecl_map[REGNO (reg)] != 0)
565 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
568 /* Unfortunately, we need a global copy of const_equiv map for communication
569 with a function called from note_stores. Be *very* careful that this
570 is used properly in the presence of recursion. */
572 varray_type global_const_equiv_varray;
574 #define FIXED_BASE_PLUS_P(X) \
575 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
576 && GET_CODE (XEXP (X, 0)) == REG \
577 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
578 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
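/* FIXED_BASE_PLUS_P matches (plus (reg VIRTUAL) (const_int N)), i.e. a
   constant offset from one of the virtual frame or argument registers.
   process_reg_param below treats such an address like a constant when
   recording equivalences in the const_equiv map.  */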
580 /* Called to set up a mapping for the case where a parameter is in a
581 register. If it is read-only and our argument is a constant, set up the
582 constant equivalence.
584 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
585 if it is a register.
587 Also, don't allow hard registers here; they might not be valid when
588 substituted into insns. */
589 static void
590 process_reg_param (map, loc, copy)
591 struct inline_remap *map;
592 rtx loc, copy;
594 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
595 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
596 && ! REG_USERVAR_P (copy))
597 || (GET_CODE (copy) == REG
598 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
600 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
601 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
602 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
603 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
604 copy = temp;
606 map->reg_map[REGNO (loc)] = copy;
609 /* Compare two BLOCKs for qsort. The key we sort on is the
610 BLOCK_ABSTRACT_ORIGIN of the blocks. We cannot just subtract the
 611 two pointers, because the difference may overflow an int. */
613 static int
614 compare_blocks (v1, v2)
615 const PTR v1;
616 const PTR v2;
618 tree b1 = *((const tree *) v1);
619 tree b2 = *((const tree *) v2);
620 char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
621 char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
623 if (p1 == p2)
624 return 0;
625 return p1 < p2 ? -1 : 1;
628 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
629 an original block; the second to a remapped equivalent. */
631 static int
632 find_block (v1, v2)
633 const PTR v1;
634 const PTR v2;
636 const union tree_node *b1 = (const union tree_node *) v1;
637 tree b2 = *((const tree *) v2);
638 char *p1 = (char *) b1;
639 char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
641 if (p1 == p2)
642 return 0;
643 return p1 < p2 ? -1 : 1;
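/* These two comparison functions work together: expand_inline_function
   qsorts MAP->block_map with compare_blocks and later maps a NOTE_BLOCK
   back to its remapped copy by bsearching with find_block; both key on
   BLOCK_ABSTRACT_ORIGIN, so the orderings agree.  */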
646 /* Integrate the procedure defined by FNDECL. Note that this function
647 may wind up calling itself. Since the static variables are not
648 reentrant, we do not assign them until after the possibility
649 of recursion is eliminated.
651 If IGNORE is nonzero, do not produce a value.
652 Otherwise store the value in TARGET if it is nonzero and that is convenient.
654 Value is:
655 (rtx)-1 if we could not substitute the function
656 0 if we substituted it and it does not produce a value
657 else an rtx for where the value is stored. */
660 expand_inline_function (fndecl, parms, target, ignore, type,
661 structure_value_addr)
662 tree fndecl, parms;
663 rtx target;
664 int ignore;
665 tree type;
666 rtx structure_value_addr;
668 struct function *inlining_previous;
669 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
670 tree formal, actual, block;
671 rtx parm_insns = inl_f->emit->x_first_insn;
672 rtx insns = (inl_f->inl_last_parm_insn
673 ? NEXT_INSN (inl_f->inl_last_parm_insn)
674 : parm_insns);
675 tree *arg_trees;
676 rtx *arg_vals;
677 int max_regno;
678 int i;
679 int min_labelno = inl_f->emit->x_first_label_num;
680 int max_labelno = inl_f->inl_max_label_num;
681 int nargs;
682 rtx loc;
683 rtx stack_save = 0;
684 rtx temp;
685 struct inline_remap *map = 0;
686 rtvec arg_vector = inl_f->original_arg_vector;
687 rtx static_chain_value = 0;
688 int inl_max_uid;
689 int eh_region_offset;
691 /* The pointer used to track the true location of the memory used
692 for MAP->LABEL_MAP. */
693 rtx *real_label_map = 0;
695 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
696 max_regno = inl_f->emit->x_reg_rtx_no + 3;
697 if (max_regno < FIRST_PSEUDO_REGISTER)
698 abort ();
700 /* Pull out the decl for the function definition; fndecl may be a
701 local declaration, which would break DECL_ABSTRACT_ORIGIN. */
702 fndecl = inl_f->decl;
704 nargs = list_length (DECL_ARGUMENTS (fndecl));
706 if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
707 cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
 709 /* Check that the parm types match and that sufficient arguments were
710 passed. Since the appropriate conversions or default promotions have
711 already been applied, the machine modes should match exactly. */
713 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
714 formal;
715 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
717 tree arg;
718 enum machine_mode mode;
720 if (actual == 0)
721 return (rtx) (size_t) -1;
723 arg = TREE_VALUE (actual);
724 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
726 if (arg == error_mark_node
727 || mode != TYPE_MODE (TREE_TYPE (arg))
728 /* If they are block mode, the types should match exactly.
729 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
730 which could happen if the parameter has incomplete type. */
731 || (mode == BLKmode
732 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
733 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
734 return (rtx) (size_t) -1;
737 /* Extra arguments are valid, but will be ignored below, so we must
738 evaluate them here for side-effects. */
739 for (; actual; actual = TREE_CHAIN (actual))
740 expand_expr (TREE_VALUE (actual), const0_rtx,
741 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
743 /* Expand the function arguments. Do this first so that any
744 new registers get created before we allocate the maps. */
746 arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
747 arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
749 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
750 formal;
751 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
753 /* Actual parameter, converted to the type of the argument within the
754 function. */
755 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
756 /* Mode of the variable used within the function. */
757 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
758 int invisiref = 0;
760 arg_trees[i] = arg;
761 loc = RTVEC_ELT (arg_vector, i);
763 /* If this is an object passed by invisible reference, we copy the
764 object into a stack slot and save its address. If this will go
765 into memory, we do nothing now. Otherwise, we just expand the
766 argument. */
767 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
768 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
770 rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);
772 store_expr (arg, stack_slot, 0);
773 arg_vals[i] = XEXP (stack_slot, 0);
774 invisiref = 1;
776 else if (GET_CODE (loc) != MEM)
778 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
780 int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
781 enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));
783 pmode = promote_mode (TREE_TYPE (formal), pmode,
784 &unsignedp, 0);
786 if (GET_MODE (loc) != pmode)
787 abort ();
 789 /* The mode of LOC and ARG can differ if LOC was a variable
790 that had its mode promoted via PROMOTED_MODE. */
791 arg_vals[i] = convert_modes (pmode,
792 TYPE_MODE (TREE_TYPE (arg)),
793 expand_expr (arg, NULL_RTX, mode,
794 EXPAND_SUM),
795 unsignedp);
797 else
798 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
800 else
801 arg_vals[i] = 0;
803 if (arg_vals[i] != 0
804 && (! TREE_READONLY (formal)
805 /* If the parameter is not read-only, copy our argument through
806 a register. Also, we cannot use ARG_VALS[I] if it overlaps
807 TARGET in any way. In the inline function, they will likely
808 be two different pseudos, and `safe_from_p' will make all
809 sorts of smart assumptions about their not conflicting.
810 But if ARG_VALS[I] overlaps TARGET, these assumptions are
811 wrong, so put ARG_VALS[I] into a fresh register.
812 Don't worry about invisible references, since their stack
813 temps will never overlap the target. */
814 || (target != 0
815 && ! invisiref
816 && (GET_CODE (arg_vals[i]) == REG
817 || GET_CODE (arg_vals[i]) == SUBREG
818 || GET_CODE (arg_vals[i]) == MEM)
819 && reg_overlap_mentioned_p (arg_vals[i], target))
820 /* ??? We must always copy a SUBREG into a REG, because it might
821 get substituted into an address, and not all ports correctly
822 handle SUBREGs in addresses. */
823 || (GET_CODE (arg_vals[i]) == SUBREG)))
824 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
826 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
827 && POINTER_TYPE_P (TREE_TYPE (formal)))
828 mark_reg_pointer (arg_vals[i],
829 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
832 /* Allocate the structures we use to remap things. */
834 map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
835 map->fndecl = fndecl;
837 VARRAY_TREE_INIT (map->block_map, 10, "block_map");
838 map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
840 /* We used to use alloca here, but the size of what it would try to
841 allocate would occasionally cause it to exceed the stack limit and
842 cause unpredictable core dumps. */
843 real_label_map
844 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
845 map->label_map = real_label_map;
846 map->local_return_label = NULL_RTX;
848 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
849 map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
850 map->min_insnno = 0;
851 map->max_insnno = inl_max_uid;
853 map->integrating = 1;
854 map->compare_src = NULL_RTX;
855 map->compare_mode = VOIDmode;
857 /* const_equiv_varray maps pseudos in our routine to constants, so
858 it needs to be large enough for all our pseudos. This is the
859 number we are currently using plus the number in the called
860 routine, plus 15 for each arg, five to compute the virtual frame
861 pointer, and five for the return value. This should be enough
862 for most cases. We do not reference entries outside the range of
863 the map.
865 ??? These numbers are quite arbitrary and were obtained by
866 experimentation. At some point, we should try to allocate the
867 table after all the parameters are set up so we can more accurately
868 estimate the number of pseudos we will need. */
870 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
871 (max_reg_num ()
872 + (max_regno - FIRST_PSEUDO_REGISTER)
873 + 15 * nargs
874 + 10),
875 "expand_inline_function");
876 map->const_age = 0;
878 /* Record the current insn in case we have to set up pointers to frame
879 and argument memory blocks. If there are no insns yet, add a dummy
880 insn that can be used as an insertion point. */
881 map->insns_at_start = get_last_insn ();
882 if (map->insns_at_start == 0)
883 map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);
885 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
886 map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;
888 /* Update the outgoing argument size to allow for those in the inlined
889 function. */
890 if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
891 current_function_outgoing_args_size = inl_f->outgoing_args_size;
893 /* If the inline function needs to make PIC references, that means
894 that this function's PIC offset table must be used. */
895 if (inl_f->uses_pic_offset_table)
896 current_function_uses_pic_offset_table = 1;
898 /* If this function needs a context, set it up. */
899 if (inl_f->needs_context)
900 static_chain_value = lookup_static_chain (fndecl);
902 if (GET_CODE (parm_insns) == NOTE
903 && NOTE_LINE_NUMBER (parm_insns) > 0)
905 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
906 NOTE_LINE_NUMBER (parm_insns));
907 if (note)
908 RTX_INTEGRATED_P (note) = 1;
911 /* Process each argument. For each, set up things so that the function's
912 reference to the argument will refer to the argument being passed.
913 We only replace REG with REG here. Any simplifications are done
914 via const_equiv_map.
916 We make two passes: In the first, we deal with parameters that will
917 be placed into registers, since we need to ensure that the allocated
918 register number fits in const_equiv_map. Then we store all non-register
919 parameters into their memory location. */
921 /* Don't try to free temp stack slots here, because we may put one of the
922 parameters into a temp stack slot. */
924 for (i = 0; i < nargs; i++)
926 rtx copy = arg_vals[i];
928 loc = RTVEC_ELT (arg_vector, i);
930 /* There are three cases, each handled separately. */
931 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
932 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
934 /* This must be an object passed by invisible reference (it could
935 also be a variable-sized object, but we forbid inlining functions
936 with variable-sized arguments). COPY is the address of the
937 actual value (this computation will cause it to be copied). We
938 map that address for the register, noting the actual address as
939 an equivalent in case it can be substituted into the insns. */
941 if (GET_CODE (copy) != REG)
943 temp = copy_addr_to_reg (copy);
944 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
945 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
946 copy = temp;
948 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
950 else if (GET_CODE (loc) == MEM)
952 /* This is the case of a parameter that lives in memory. It
953 will live in the block we allocate in the called routine's
954 frame that simulates the incoming argument area. Do nothing
955 with the parameter now; we will call store_expr later. In
956 this case, however, we must ensure that the virtual stack and
957 incoming arg rtx values are expanded now so that we can be
958 sure we have enough slots in the const equiv map since the
959 store_expr call can easily blow the size estimate. */
960 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
961 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
963 else if (GET_CODE (loc) == REG)
964 process_reg_param (map, loc, copy);
965 else if (GET_CODE (loc) == CONCAT)
967 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
968 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
969 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
970 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
972 process_reg_param (map, locreal, copyreal);
973 process_reg_param (map, locimag, copyimag);
975 else
976 abort ();
979 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
980 specially. This function can be called recursively, so we need to
981 save the previous value. */
982 inlining_previous = inlining;
983 inlining = inl_f;
985 /* Now do the parameters that will be placed in memory. */
987 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
988 formal; formal = TREE_CHAIN (formal), i++)
990 loc = RTVEC_ELT (arg_vector, i);
992 if (GET_CODE (loc) == MEM
993 /* Exclude case handled above. */
994 && ! (GET_CODE (XEXP (loc, 0)) == REG
995 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
997 rtx note = emit_note (DECL_SOURCE_FILE (formal),
998 DECL_SOURCE_LINE (formal));
999 if (note)
1000 RTX_INTEGRATED_P (note) = 1;
1002 /* Compute the address in the area we reserved and store the
1003 value there. */
1004 temp = copy_rtx_and_substitute (loc, map, 1);
1005 subst_constants (&temp, NULL_RTX, map, 1);
1006 apply_change_group ();
1007 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1008 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1009 store_expr (arg_trees[i], temp, 0);
1013 /* Deal with the places that the function puts its result.
1014 We are driven by what is placed into DECL_RESULT.
 1016 Initially, we assume that we don't have any special handling for
 1017 REG_FUNCTION_VALUE_P. */
1019 map->inline_target = 0;
1020 loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
1021 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
1023 if (TYPE_MODE (type) == VOIDmode)
1024 /* There is no return value to worry about. */
1026 else if (GET_CODE (loc) == MEM)
1028 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
1030 temp = copy_rtx_and_substitute (loc, map, 1);
1031 subst_constants (&temp, NULL_RTX, map, 1);
1032 apply_change_group ();
1033 target = temp;
1035 else
1037 if (! structure_value_addr
1038 || ! aggregate_value_p (DECL_RESULT (fndecl)))
1039 abort ();
1041 /* Pass the function the address in which to return a structure
1042 value. Note that a constructor can cause someone to call us
1043 with STRUCTURE_VALUE_ADDR, but the initialization takes place
1044 via the first parameter, rather than the struct return address.
1046 We have two cases: If the address is a simple register
1047 indirect, use the mapping mechanism to point that register to
1048 our structure return address. Otherwise, store the structure
1049 return value into the place that it will be referenced from. */
1051 if (GET_CODE (XEXP (loc, 0)) == REG)
1053 temp = force_operand (structure_value_addr, NULL_RTX);
1054 temp = force_reg (Pmode, temp);
1055 /* A virtual register might be invalid in an insn, because
1056 it can cause trouble in reload. Since we don't have access
1057 to the expanders at map translation time, make sure we have
1058 a proper register now.
1059 If a virtual register is actually valid, cse or combine
1060 can put it into the mapped insns. */
1061 if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
1062 && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
1063 temp = copy_to_mode_reg (Pmode, temp);
1064 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1066 if (CONSTANT_P (structure_value_addr)
1067 || GET_CODE (structure_value_addr) == ADDRESSOF
1068 || (GET_CODE (structure_value_addr) == PLUS
1069 && (XEXP (structure_value_addr, 0)
1070 == virtual_stack_vars_rtx)
1071 && (GET_CODE (XEXP (structure_value_addr, 1))
1072 == CONST_INT)))
1074 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
1075 CONST_AGE_PARM);
1078 else
1080 temp = copy_rtx_and_substitute (loc, map, 1);
1081 subst_constants (&temp, NULL_RTX, map, 0);
1082 apply_change_group ();
1083 emit_move_insn (temp, structure_value_addr);
1087 else if (ignore)
1088 /* We will ignore the result value, so don't look at its structure.
1089 Note that preparations for an aggregate return value
1090 do need to be made (above) even if it will be ignored. */
1092 else if (GET_CODE (loc) == REG)
1094 /* The function returns an object in a register and we use the return
1095 value. Set up our target for remapping. */
 1097 /* Machine mode the function was declared to return. */
1098 enum machine_mode departing_mode = TYPE_MODE (type);
1099 /* (Possibly wider) machine mode it actually computes
1100 (for the sake of callers that fail to declare it right).
1101 We have to use the mode of the result's RTL, rather than
1102 its type, since expand_function_start may have promoted it. */
1103 enum machine_mode arriving_mode
1104 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1105 rtx reg_to_map;
1107 /* Don't use MEMs as direct targets because on some machines
1108 substituting a MEM for a REG makes invalid insns.
1109 Let the combiner substitute the MEM if that is valid. */
1110 if (target == 0 || GET_CODE (target) != REG
1111 || GET_MODE (target) != departing_mode)
1113 /* Don't make BLKmode registers. If this looks like
1114 a BLKmode object being returned in a register, get
1115 the mode from that, otherwise abort. */
1116 if (departing_mode == BLKmode)
1118 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1120 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1121 arriving_mode = departing_mode;
1123 else
1124 abort ();
1127 target = gen_reg_rtx (departing_mode);
1130 /* If function's value was promoted before return,
1131 avoid machine mode mismatch when we substitute INLINE_TARGET.
1132 But TARGET is what we will return to the caller. */
1133 if (arriving_mode != departing_mode)
1135 /* Avoid creating a paradoxical subreg wider than
1136 BITS_PER_WORD, since that is illegal. */
1137 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1139 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1140 GET_MODE_BITSIZE (arriving_mode)))
1141 /* Maybe could be handled by using convert_move () ? */
1142 abort ();
1143 reg_to_map = gen_reg_rtx (arriving_mode);
1144 target = gen_lowpart (departing_mode, reg_to_map);
1146 else
1147 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1149 else
1150 reg_to_map = target;
1152 /* Usually, the result value is the machine's return register.
1153 Sometimes it may be a pseudo. Handle both cases. */
1154 if (REG_FUNCTION_VALUE_P (loc))
1155 map->inline_target = reg_to_map;
1156 else
1157 map->reg_map[REGNO (loc)] = reg_to_map;
1159 else if (GET_CODE (loc) == CONCAT)
1161 enum machine_mode departing_mode = TYPE_MODE (type);
1162 enum machine_mode arriving_mode
1163 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1165 if (departing_mode != arriving_mode)
1166 abort ();
1167 if (GET_CODE (XEXP (loc, 0)) != REG
1168 || GET_CODE (XEXP (loc, 1)) != REG)
1169 abort ();
1171 /* Don't use MEMs as direct targets because on some machines
1172 substituting a MEM for a REG makes invalid insns.
1173 Let the combiner substitute the MEM if that is valid. */
1174 if (target == 0 || GET_CODE (target) != REG
1175 || GET_MODE (target) != departing_mode)
1176 target = gen_reg_rtx (departing_mode);
1178 if (GET_CODE (target) != CONCAT)
1179 abort ();
1181 map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
1182 map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
1184 else
1185 abort ();
1187 /* Remap the exception handler data pointer from one to the other. */
1188 temp = get_exception_pointer (inl_f);
1189 if (temp)
1190 map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);
1192 /* Initialize label_map. get_label_from_map will actually make
1193 the labels. */
1194 memset ((char *) &map->label_map[min_labelno], 0,
1195 (max_labelno - min_labelno) * sizeof (rtx));
1197 /* Make copies of the decls of the symbols in the inline function, so that
1198 the copies of the variables get declared in the current function. Set
 1199 up things so that lookup_static_chain knows to interpret registers
1200 in SAVE_EXPRs for TYPE_SIZEs as local. */
1201 inline_function_decl = fndecl;
1202 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1203 block = integrate_decl_tree (inl_f->original_decl_initial, map);
1204 BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
1205 inline_function_decl = 0;
1207 /* Make a fresh binding contour that we can easily remove. Do this after
1208 expanding our arguments so cleanups are properly scoped. */
1209 expand_start_bindings_and_block (0, block);
1211 /* Sort the block-map so that it will be easy to find remapped
1212 blocks later. */
1213 qsort (&VARRAY_TREE (map->block_map, 0),
1214 map->block_map->elements_used,
1215 sizeof (tree),
1216 compare_blocks);
1218 /* Perform postincrements before actually calling the function. */
1219 emit_queue ();
1221 /* Clean up stack so that variables might have smaller offsets. */
1222 do_pending_stack_adjust ();
1224 /* Save a copy of the location of const_equiv_varray for
1225 mark_stores, called via note_stores. */
1226 global_const_equiv_varray = map->const_equiv_varray;
1228 /* If the called function does an alloca, save and restore the
1229 stack pointer around the call. This saves stack space, but
1230 also is required if this inline is being done between two
1231 pushes. */
1232 if (inl_f->calls_alloca)
1233 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1235 /* Map pseudos used for initial hard reg values. */
1236 setup_initial_hard_reg_value_integration (inl_f, map);
1238 /* Now copy the insns one by one. */
1239 copy_insn_list (insns, map, static_chain_value);
1241 /* Duplicate the EH regions. This will create an offset from the
1242 region numbers in the function we're inlining to the region
1243 numbers in the calling function. This must wait until after
1244 copy_insn_list, as we need the insn map to be complete. */
1245 eh_region_offset = duplicate_eh_regions (inl_f, map);
1247 /* Now copy the REG_NOTES for those insns. */
1248 copy_insn_notes (insns, map, eh_region_offset);
1250 /* If the insn sequence required one, emit the return label. */
1251 if (map->local_return_label)
1252 emit_label (map->local_return_label);
1254 /* Restore the stack pointer if we saved it above. */
1255 if (inl_f->calls_alloca)
1256 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1258 if (! cfun->x_whole_function_mode_p)
1259 /* In statement-at-a-time mode, we just tell the front-end to add
1260 this block to the list of blocks at this binding level. We
 1261 can't do it the way it's done for function-at-a-time mode because the
1262 superblocks have not been created yet. */
1263 (*lang_hooks.decls.insert_block) (block);
1264 else
1266 BLOCK_CHAIN (block)
1267 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1268 BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
1271 /* End the scope containing the copied formal parameter variables
1272 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1273 here so that expand_end_bindings will not check for unused
1274 variables. That's already been checked for when the inlined
1275 function was defined. */
1276 expand_end_bindings (NULL_TREE, 1, 1);
1278 /* Must mark the line number note after inlined functions as a repeat, so
1279 that the test coverage code can avoid counting the call twice. This
1280 just tells the code to ignore the immediately following line note, since
1281 there already exists a copy of this note before the expanded inline call.
1282 This line number note is still needed for debugging though, so we can't
1283 delete it. */
1284 if (flag_test_coverage)
1285 emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);
1287 emit_line_note (input_filename, lineno);
1289 /* If the function returns a BLKmode object in a register, copy it
1290 out of the temp register into a BLKmode memory object. */
1291 if (target
1292 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1293 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
1294 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
1296 if (structure_value_addr)
1298 target = gen_rtx_MEM (TYPE_MODE (type),
1299 memory_address (TYPE_MODE (type),
1300 structure_value_addr));
1301 set_mem_attributes (target, type, 1);
1304 /* Make sure we free the things we explicitly allocated with xmalloc. */
1305 if (real_label_map)
1306 free (real_label_map);
1307 VARRAY_FREE (map->const_equiv_varray);
1308 free (map->reg_map);
1309 free (map->insn_map);
1310 free (map);
1311 free (arg_vals);
1312 free (arg_trees);
1314 inlining = inlining_previous;
1316 return target;
1319 /* Make copies of each insn in the given list using the mapping
1320 computed in expand_inline_function. This function may call itself for
1321 insns containing sequences.
1323 Copying is done in two passes, first the insns and then their REG_NOTES.
1325 If static_chain_value is nonzero, it represents the context-pointer
1326 register for the function. */
1328 static void
1329 copy_insn_list (insns, map, static_chain_value)
1330 rtx insns;
1331 struct inline_remap *map;
1332 rtx static_chain_value;
1334 int i;
1335 rtx insn;
1336 rtx temp;
1337 #ifdef HAVE_cc0
1338 rtx cc0_insn = 0;
1339 #endif
1340 rtx static_chain_mem = 0;
1342 /* Copy the insns one by one. Do this in two passes, first the insns and
1343 then their REG_NOTES. */
1345 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1347 for (insn = insns; insn; insn = NEXT_INSN (insn))
1349 rtx copy, pattern, set;
1351 map->orig_asm_operands_vector = 0;
1353 switch (GET_CODE (insn))
1355 case INSN:
1356 pattern = PATTERN (insn);
1357 set = single_set (insn);
1358 copy = 0;
1359 if (GET_CODE (pattern) == USE
1360 && GET_CODE (XEXP (pattern, 0)) == REG
1361 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1362 /* The (USE (REG n)) at return from the function should
1363 be ignored since we are changing (REG n) into
1364 inline_target. */
1365 break;
1367 /* Ignore setting a function value that we don't want to use. */
1368 if (map->inline_target == 0
1369 && set != 0
1370 && GET_CODE (SET_DEST (set)) == REG
1371 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1373 if (volatile_refs_p (SET_SRC (set)))
1375 rtx new_set;
1377 /* If we must not delete the source,
1378 load it into a new temporary. */
1379 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1381 new_set = single_set (copy);
1382 if (new_set == 0)
1383 abort ();
1385 SET_DEST (new_set)
1386 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1388 /* If the source and destination are the same and it
1389 has a note on it, keep the insn. */
1390 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1391 && REG_NOTES (insn) != 0)
1392 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1393 else
1394 break;
1397 /* Similarly if an ignored return value is clobbered. */
1398 else if (map->inline_target == 0
1399 && GET_CODE (pattern) == CLOBBER
1400 && GET_CODE (XEXP (pattern, 0)) == REG
1401 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1402 break;
1404 /* Look for the address of the static chain slot. The
1405 rtx_equal_p comparisons against the
1406 static_chain_incoming_rtx below may fail if the static
1407 chain is in memory and the address specified is not
1408 "legitimate". This happens on Xtensa where the static
1409 chain is at a negative offset from argp and where only
1410 positive offsets are legitimate. When the RTL is
1411 generated, the address is "legitimized" by copying it
1412 into a register, causing the rtx_equal_p comparisons to
1413 fail. This workaround looks for code that sets a
1414 register to the address of the static chain. Subsequent
1415 memory references via that register can then be
1416 identified as static chain references. We assume that
1417 the register is only assigned once, and that the static
1418 chain address is only live in one register at a time. */
1420 else if (static_chain_value != 0
1421 && set != 0
1422 && GET_CODE (static_chain_incoming_rtx) == MEM
1423 && GET_CODE (SET_DEST (set)) == REG
1424 && rtx_equal_p (SET_SRC (set),
1425 XEXP (static_chain_incoming_rtx, 0)))
1427 static_chain_mem =
1428 gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
1429 SET_DEST (set));
 1431 /* Emit the instruction in case it is used for something
 1432 other than setting the static chain; if it's not used,
 1433 it can always be removed as dead code. */
1434 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1437 /* If this is setting the static chain rtx, omit it. */
1438 else if (static_chain_value != 0
1439 && set != 0
1440 && (rtx_equal_p (SET_DEST (set),
1441 static_chain_incoming_rtx)
1442 || (static_chain_mem
1443 && rtx_equal_p (SET_DEST (set), static_chain_mem))))
1444 break;
1446 /* If this is setting the static chain pseudo, set it from
1447 the value we want to give it instead. */
1448 else if (static_chain_value != 0
1449 && set != 0
1450 && (rtx_equal_p (SET_SRC (set),
1451 static_chain_incoming_rtx)
1452 || (static_chain_mem
1453 && rtx_equal_p (SET_SRC (set), static_chain_mem))))
1455 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
1457 copy = emit_move_insn (newdest, static_chain_value);
1458 if (GET_CODE (static_chain_incoming_rtx) != MEM)
1459 static_chain_value = 0;
1462 /* If this is setting the virtual stack vars register, this must
1463 be the code at the handler for a builtin longjmp. The value
1464 saved in the setjmp buffer will be the address of the frame
1465 we've made for this inlined instance within our frame. But we
1466 know the offset of that value so we can use it to reconstruct
1467 our virtual stack vars register from that value. If we are
1468 copying it from the stack pointer, leave it unchanged. */
1469 else if (set != 0
1470 && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
1472 HOST_WIDE_INT offset;
1473 temp = map->reg_map[REGNO (SET_DEST (set))];
1474 temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1475 REGNO (temp)).rtx;
1477 if (rtx_equal_p (temp, virtual_stack_vars_rtx))
1478 offset = 0;
1479 else if (GET_CODE (temp) == PLUS
1480 && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
1481 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1482 offset = INTVAL (XEXP (temp, 1));
1483 else
1484 abort ();
1486 if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
1487 temp = SET_SRC (set);
1488 else
1489 temp = force_operand (plus_constant (SET_SRC (set),
1490 - offset),
1491 NULL_RTX);
1493 copy = emit_move_insn (virtual_stack_vars_rtx, temp);
1496 else
1497 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1498 /* REG_NOTES will be copied later. */
1500 #ifdef HAVE_cc0
1501 /* If this insn is setting CC0, it may need to look at
1502 the insn that uses CC0 to see what type of insn it is.
1503 In that case, the call to recog via validate_change will
1504 fail. So don't substitute constants here. Instead,
1505 do it when we emit the following insn.
1507 For example, see the pyr.md file. That machine has signed and
1508 unsigned compares. The compare patterns must check the
 1509 following branch insn to see what kind of compare to
1510 emit.
1512 If the previous insn set CC0, substitute constants on it as
1513 well. */
1514 if (sets_cc0_p (PATTERN (copy)) != 0)
1515 cc0_insn = copy;
1516 else
1518 if (cc0_insn)
1519 try_constants (cc0_insn, map);
1520 cc0_insn = 0;
1521 try_constants (copy, map);
1523 #else
1524 try_constants (copy, map);
1525 #endif
1526 INSN_SCOPE (copy) = INSN_SCOPE (insn);
1527 break;
1529 case JUMP_INSN:
1530 if (map->integrating && returnjump_p (insn))
1532 if (map->local_return_label == 0)
1533 map->local_return_label = gen_label_rtx ();
1534 pattern = gen_jump (map->local_return_label);
1536 else
1537 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1539 copy = emit_jump_insn (pattern);
1541 #ifdef HAVE_cc0
1542 if (cc0_insn)
1543 try_constants (cc0_insn, map);
1544 cc0_insn = 0;
1545 #endif
1546 try_constants (copy, map);
1547 INSN_SCOPE (copy) = INSN_SCOPE (insn);
 1549 /* If this used to be a conditional jump insn whose branch
 1550 direction is now known, we must do something special. */
1551 if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
1553 #ifdef HAVE_cc0
1554 /* If the previous insn set cc0 for us, delete it. */
1555 if (only_sets_cc0_p (PREV_INSN (copy)))
1556 delete_related_insns (PREV_INSN (copy));
1557 #endif
1559 /* If this is now a no-op, delete it. */
1560 if (map->last_pc_value == pc_rtx)
1562 delete_related_insns (copy);
1563 copy = 0;
1565 else
 1566 /* Otherwise, this is an unconditional jump so we must put a
1567 BARRIER after it. We could do some dead code elimination
1568 here, but jump.c will do it just as well. */
1569 emit_barrier ();
1571 break;
1573 case CALL_INSN:
1574 /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1575 three attached sequences: normal call, sibling call and tail
1576 recursion. */
1577 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1579 rtx sequence[3];
1580 rtx tail_label;
1582 for (i = 0; i < 3; i++)
1584 rtx seq;
1586 sequence[i] = NULL_RTX;
1587 seq = XEXP (PATTERN (insn), i);
1588 if (seq)
1590 start_sequence ();
1591 copy_insn_list (seq, map, static_chain_value);
1592 sequence[i] = get_insns ();
1593 end_sequence ();
1597 /* Find the new tail recursion label.
1598 It will already be substituted into sequence[2]. */
1599 tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
1600 map, 0);
1602 copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
1603 sequence[0],
1604 sequence[1],
1605 sequence[2],
1606 tail_label));
1607 break;
1610 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1611 copy = emit_call_insn (pattern);
1613 SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
1614 CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
1615 INSN_SCOPE (copy) = INSN_SCOPE (insn);
1617 /* Because the USAGE information potentially contains objects other
1618 than hard registers, we need to copy it. */
1620 CALL_INSN_FUNCTION_USAGE (copy)
1621 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1622 map, 0);
1624 #ifdef HAVE_cc0
1625 if (cc0_insn)
1626 try_constants (cc0_insn, map);
1627 cc0_insn = 0;
1628 #endif
1629 try_constants (copy, map);
1631 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1632 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1633 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
1634 break;
1636 case CODE_LABEL:
1637 copy = emit_label (get_label_from_map (map,
1638 CODE_LABEL_NUMBER (insn)));
1639 LABEL_NAME (copy) = LABEL_NAME (insn);
1640 map->const_age++;
1641 break;
1643 case BARRIER:
1644 copy = emit_barrier ();
1645 break;
1647 case NOTE:
1648 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1650 copy = emit_label (get_label_from_map (map,
1651 CODE_LABEL_NUMBER (insn)));
1652 LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1653 map->const_age++;
1654 break;
1657 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1658 discarded because it is important to have only one of
1659 each in the current function.
1661 NOTE_INSN_DELETED notes aren't useful. */
1663 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1664 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1665 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1667 copy = emit_note (NOTE_SOURCE_FILE (insn),
1668 NOTE_LINE_NUMBER (insn));
1669 if (copy
1670 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1671 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1672 && NOTE_BLOCK (insn))
1674 tree *mapped_block_p;
1676 mapped_block_p
1677 = (tree *) bsearch (NOTE_BLOCK (insn),
1678 &VARRAY_TREE (map->block_map, 0),
1679 map->block_map->elements_used,
1680 sizeof (tree),
1681 find_block);
1683 if (!mapped_block_p)
1684 abort ();
1685 else
1686 NOTE_BLOCK (copy) = *mapped_block_p;
1688 else if (copy
1689 && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1690 NOTE_EXPECTED_VALUE (copy)
1691 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1692 map, 0);
1694 else
1695 copy = 0;
1696 break;
1698 default:
1699 abort ();
1702 if (copy)
1703 RTX_INTEGRATED_P (copy) = 1;
1705 map->insn_map[INSN_UID (insn)] = copy;
1709 /* Copy the REG_NOTES. Increment const_age, so that only constants
1710 from parameters can be substituted in. These are the only ones
1711 that are valid across the entire function. */
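/* For example (illustrative): a (REG_EH_REGION 2) note on a copied insn
   is rewritten below to refer to region 2 + EH_REGION_OFFSET, so that it
   names an exception region of the function being inlined into, while
   REG_LABEL notes are simply removed from the copy.  */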
1713 static void
1714 copy_insn_notes (insns, map, eh_region_offset)
1715 rtx insns;
1716 struct inline_remap *map;
1717 int eh_region_offset;
1719 rtx insn, new_insn;
1721 map->const_age++;
1722 for (insn = insns; insn; insn = NEXT_INSN (insn))
1724 if (! INSN_P (insn))
1725 continue;
1727 new_insn = map->insn_map[INSN_UID (insn)];
1728 if (! new_insn)
1729 continue;
1731 if (REG_NOTES (insn))
1733 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1735 /* We must also do subst_constants, in case one of our parameters
1736 has const type and constant value. */
1737 subst_constants (&note, NULL_RTX, map, 0);
1738 apply_change_group ();
1739 REG_NOTES (new_insn) = note;
1741 /* Delete any REG_LABEL notes from the chain. Remap any
1742 REG_EH_REGION notes. */
1743 for (; note; note = next)
1745 next = XEXP (note, 1);
1746 if (REG_NOTE_KIND (note) == REG_LABEL)
1747 remove_note (new_insn, note);
1748 else if (REG_NOTE_KIND (note) == REG_EH_REGION
1749 && INTVAL (XEXP (note, 0)) > 0)
1750 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1751 + eh_region_offset);
1755 if (GET_CODE (insn) == CALL_INSN
1756 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1758 int i;
1759 for (i = 0; i < 3; i++)
1760 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1763 if (GET_CODE (insn) == JUMP_INSN
1764 && GET_CODE (PATTERN (insn)) == RESX)
1765 XINT (PATTERN (new_insn), 0) += eh_region_offset;
1769 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1770 push all of those decls and give each one the corresponding home. */
1772 static void
1773 integrate_parm_decls (args, map, arg_vector)
1774 tree args;
1775 struct inline_remap *map;
1776 rtvec arg_vector;
1778 tree tail;
1779 int i;
1781 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1783 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1784 current_function_decl);
1785 rtx new_decl_rtl
1786 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1788 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1789 here, but that's going to require some more work. */
1790 /* DECL_INCOMING_RTL (decl) = ?; */
1791 /* Fully instantiate the address with the equivalent form so that the
1792 debugging information contains the actual register, instead of the
1793 virtual register. Do this by not passing an insn to
1794 subst_constants. */
1795 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1796 apply_change_group ();
1797 SET_DECL_RTL (decl, new_decl_rtl);
1801 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1802 current function a tree of contexts isomorphic to the one that is given.
1804 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1805 registers used in the DECL_RTL field should be remapped. If it is zero,
1806 no mapping is necessary. */
1808 static tree
1809 integrate_decl_tree (let, map)
1810 tree let;
1811 struct inline_remap *map;
1813 tree t;
1814 tree new_block;
1815 tree *next;
1817 new_block = make_node (BLOCK);
1818 VARRAY_PUSH_TREE (map->block_map, new_block);
1819 next = &BLOCK_VARS (new_block);
1821 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1823 tree d;
1825 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1827 if (DECL_RTL_SET_P (t))
1829 rtx r;
1831 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1833 /* Fully instantiate the address with the equivalent form so that the
1834 debugging information contains the actual register, instead of the
1835 virtual register. Do this by not passing an insn to
1836 subst_constants. */
1837 r = DECL_RTL (d);
1838 subst_constants (&r, NULL_RTX, map, 1);
1839 SET_DECL_RTL (d, r);
1841 if (GET_CODE (r) == REG)
1842 REGNO_DECL (REGNO (r)) = d;
1843 else if (GET_CODE (r) == CONCAT)
1845 REGNO_DECL (REGNO (XEXP (r, 0))) = d;
1846 REGNO_DECL (REGNO (XEXP (r, 1))) = d;
1849 apply_change_group ();
1852 /* Add this declaration to the list of variables in the new
1853 block. */
1854 *next = d;
1855 next = &TREE_CHAIN (d);
1858 next = &BLOCK_SUBBLOCKS (new_block);
1859 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1861 *next = integrate_decl_tree (t, map);
1862 BLOCK_SUPERCONTEXT (*next) = new_block;
1863 next = &BLOCK_CHAIN (*next);
1866 TREE_USED (new_block) = TREE_USED (let);
1867 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1869 return new_block;
1872 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1873 except for those few rtx codes that are sharable.
1875 We always return an rtx that is similar to the incoming rtx, with the
1876 exception of possibly changing a REG to a SUBREG or vice versa. No
1877 rtl is ever emitted.
1879 If FOR_LHS is nonzero, it means we are processing something that will
1880 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1881 inlining since we need to be conservative in how it is set for
1882 such cases.
1884 Handle constants that need to be placed in the constant pool by
1885 calling `force_const_mem'. */
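/* A minimal usage sketch (mirroring the calls made from copy_insn_list
   above; MAP is assumed to have been initialized by the inliner):

	rtx body = copy_rtx_and_substitute (PATTERN (insn), map, 0);

   FOR_LHS is passed as 1 only for subexpressions that will be stored
   into, as the SET_DEST handling further below does.  */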
1888 copy_rtx_and_substitute (orig, map, for_lhs)
1889 rtx orig;
1890 struct inline_remap *map;
1891 int for_lhs;
1893 rtx copy, temp;
1894 int i, j;
1895 RTX_CODE code;
1896 enum machine_mode mode;
1897 const char *format_ptr;
1898 int regno;
1900 if (orig == 0)
1901 return 0;
1903 code = GET_CODE (orig);
1904 mode = GET_MODE (orig);
1906 switch (code)
1908 case REG:
1909 /* If the stack pointer register shows up, it must be part of
1910 stack-adjustments (*not* because we eliminated the frame pointer!).
1911 Small hard registers are returned as-is. Pseudo-registers
1912 go through their `reg_map'. */
1913 regno = REGNO (orig);
1914 if (regno <= LAST_VIRTUAL_REGISTER
1915 || (map->integrating
1916 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1918 /* Some hard registers are also mapped,
1919 but others are not translated. */
1920 if (map->reg_map[regno] != 0)
1921 return map->reg_map[regno];
1923 /* If this is the virtual frame pointer, make space in current
1924 function's stack frame for the stack frame of the inline function.
1926 Copy the address of this area into a pseudo. Map
1927 virtual_stack_vars_rtx to this pseudo and set up a constant
1928 equivalence for it to be the address. This will substitute the
1929 address into insns where it can be substituted and use the new
1930 pseudo where it can't. */
1931 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1933 rtx loc, seq;
1934 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1935 #ifdef FRAME_GROWS_DOWNWARD
1936 int alignment
1937 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1938 / BITS_PER_UNIT);
1940 /* In this case, virtual_stack_vars_rtx points to one byte
1941 higher than the top of the frame area. So make sure we
1942 allocate a big enough chunk to keep the frame pointer
1943 aligned like a real one. */
1944 if (alignment)
1945 size = CEIL_ROUND (size, alignment);
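	/* Worked example (illustrative only): with size == 13 and
	   alignment == 8, CEIL_ROUND yields 16, the smallest multiple
	   of the alignment that is at least the requested size.  */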
1946 #endif
1947 start_sequence ();
1948 loc = assign_stack_temp (BLKmode, size, 1);
1949 loc = XEXP (loc, 0);
1950 #ifdef FRAME_GROWS_DOWNWARD
1951 /* In this case, virtual_stack_vars_rtx points to one byte
1952 higher than the top of the frame area. So compute the offset
1953 to one byte higher than our substitute frame. */
1954 loc = plus_constant (loc, size);
1955 #endif
1956 map->reg_map[regno] = temp
1957 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1959 #ifdef STACK_BOUNDARY
1960 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1961 #endif
1963 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1965 seq = get_insns ();
1966 end_sequence ();
1967 emit_insn_after (seq, map->insns_at_start);
1968 return temp;
1970 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1971 || (map->integrating
1972 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1973 == orig)))
1975 /* Do the same for a block to contain any arguments referenced
1976 in memory. */
1977 rtx loc, seq;
1978 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1980 start_sequence ();
1981 loc = assign_stack_temp (BLKmode, size, 1);
1982 loc = XEXP (loc, 0);
1983 /* When arguments grow downward, the virtual incoming
1984 args pointer points to the top of the argument block,
1985 so the remapped location better do the same. */
1986 #ifdef ARGS_GROW_DOWNWARD
1987 loc = plus_constant (loc, size);
1988 #endif
1989 map->reg_map[regno] = temp
1990 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1992 #ifdef STACK_BOUNDARY
1993 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1994 #endif
1996 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1998 seq = get_insns ();
1999 end_sequence ();
2000 emit_insn_after (seq, map->insns_at_start);
2001 return temp;
2003 else if (REG_FUNCTION_VALUE_P (orig))
2005 /* This is a reference to the function return value. If
2006 the function doesn't have a return value, error. If the
2007 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
2008 if (map->inline_target == 0)
2010 if (rtx_equal_function_value_matters)
2011 /* This is an ignored return value. We must not
2012 leave it in with REG_FUNCTION_VALUE_P set, since
2013 that would confuse subsequent inlining of the
2014 current function into a later function. */
2015 return gen_rtx_REG (GET_MODE (orig), regno);
2016 else
2017 /* Must be unrolling loops or replicating code if we
2018 reach here, so return the register unchanged. */
2019 return orig;
2021 else if (GET_MODE (map->inline_target) != BLKmode
2022 && mode != GET_MODE (map->inline_target))
2023 return gen_lowpart (mode, map->inline_target);
2024 else
2025 return map->inline_target;
2027 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2028 /* If leaf_renumber_regs_insn() might remap this register to
2029 some other number, make sure we don't share it with the
2030 inlined function, otherwise delayed optimization of the
2031 inlined function may change it in place, breaking our
2032 reference to it. We may still share it within the
2033 function, so create an entry for this register in the
2034 reg_map. */
2035 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2036 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2038 if (!map->leaf_reg_map[regno][mode])
2039 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2040 return map->leaf_reg_map[regno][mode];
2042 #endif
2043 else
2044 return orig;
2046 abort ();
2048 if (map->reg_map[regno] == NULL)
2050 map->reg_map[regno] = gen_reg_rtx (mode);
2051 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2052 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2053 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2054 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2056 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2057 mark_reg_pointer (map->reg_map[regno],
2058 map->regno_pointer_align[regno]);
2060 return map->reg_map[regno];
2062 case SUBREG:
2063 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2064 return simplify_gen_subreg (GET_MODE (orig), copy,
2065 GET_MODE (SUBREG_REG (orig)),
2066 SUBREG_BYTE (orig));
2068 case ADDRESSOF:
2069 copy = gen_rtx_ADDRESSOF (mode,
2070 copy_rtx_and_substitute (XEXP (orig, 0),
2071 map, for_lhs),
2072 0, ADDRESSOF_DECL (orig));
2073 regno = ADDRESSOF_REGNO (orig);
2074 if (map->reg_map[regno])
2075 regno = REGNO (map->reg_map[regno]);
2076 else if (regno > LAST_VIRTUAL_REGISTER)
2078 temp = XEXP (orig, 0);
2079 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2080 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2081 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2082 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2083 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2085 /* Objects may initially be represented as registers, but
2086 may be turned into a MEM if their address is taken by
2087 put_var_into_stack. Therefore, the register table may have
2088 entries which are MEMs.
2090 We briefly tried to clear such entries, but that ended up
2091 cascading into many changes due to the optimizers not being
2092 prepared for empty entries in the register table. So we've
2093 decided to allow the MEMs in the register table for now. */
2094 if (REG_P (map->x_regno_reg_rtx[regno])
2095 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2096 mark_reg_pointer (map->reg_map[regno],
2097 map->regno_pointer_align[regno]);
2098 regno = REGNO (map->reg_map[regno]);
2100 ADDRESSOF_REGNO (copy) = regno;
2101 return copy;
2103 case USE:
2104 case CLOBBER:
2105 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2106 to (use foo) if the original insn didn't have a subreg.
2107 Removing the subreg distorts the VAX movstrhi pattern
2108 by changing the mode of an operand. */
2109 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2110 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2111 copy = SUBREG_REG (copy);
2112 return gen_rtx_fmt_e (code, VOIDmode, copy);
2114 /* We need to handle "deleted" labels that appear in the DECL_RTL
2115 of a LABEL_DECL. */
2116 case NOTE:
2117 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2118 break;
2120 /* ... FALLTHRU ... */
2121 case CODE_LABEL:
2122 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2123 = LABEL_PRESERVE_P (orig);
2124 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2126 case LABEL_REF:
2127 copy
2128 = gen_rtx_LABEL_REF
2129 (mode,
2130 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2131 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2133 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2135 /* The fact that this label was previously nonlocal does not mean
2136 it still is, so we must check if it is within the range of
2137 this function's labels. */
2138 LABEL_REF_NONLOCAL_P (copy)
2139 = (LABEL_REF_NONLOCAL_P (orig)
2140 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2141 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2143 /* If we have made a nonlocal label local, it means that this
2144 inlined call will be referring to our nonlocal goto handler.
2145 So make sure we create one for this block; we normally would
2146 not since this is not otherwise considered a "call". */
2147 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2148 function_call_count++;
2150 return copy;
2152 case PC:
2153 case CC0:
2154 case CONST_INT:
2155 case CONST_VECTOR:
2156 return orig;
2158 case SYMBOL_REF:
2159 /* Symbols which represent the address of a label stored in the constant
2160 pool must be modified to point to a constant pool entry for the
2161 remapped label. Otherwise, symbols are returned unchanged. */
2162 if (CONSTANT_POOL_ADDRESS_P (orig))
2164 struct function *f = inlining ? inlining : cfun;
2165 rtx constant = get_pool_constant_for_function (f, orig);
2166 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2167 if (inlining)
2169 rtx temp = force_const_mem (const_mode,
2170 copy_rtx_and_substitute (constant,
2171 map, 0));
2173 #if 0
2174 /* Legitimizing the address here is incorrect.
2176 Since we had a SYMBOL_REF before, we can assume it is valid
2177 to have one in this position in the insn.
2179 Also, change_address may create new registers. These
2180 registers will not have valid reg_map entries. This can
2181 cause try_constants() to fail because it assumes that all
2182 registers in the rtx have valid reg_map entries, and it may
2183 end up replacing one of these new registers with junk. */
2185 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2186 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2187 #endif
2189 temp = XEXP (temp, 0);
2191 #ifdef POINTERS_EXTEND_UNSIGNED
2192 if (GET_MODE (temp) != GET_MODE (orig))
2193 temp = convert_memory_address (GET_MODE (orig), temp);
2194 #endif
2195 return temp;
2197 else if (GET_CODE (constant) == LABEL_REF)
2198 return XEXP (force_const_mem
2199 (GET_MODE (orig),
2200 copy_rtx_and_substitute (constant, map, for_lhs)),
2204 return orig;
2206 case CONST_DOUBLE:
2207 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2208 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2209 duplicate of a CONST_DOUBLE we have already seen. */
2210 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2212 REAL_VALUE_TYPE d;
2214 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2215 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2217 else
2218 return immed_double_const (CONST_DOUBLE_LOW (orig),
2219 CONST_DOUBLE_HIGH (orig), VOIDmode);
2221 case CONST:
2222 /* Make new constant pool entry for a constant
2223 that was in the pool of the inline function. */
2224 if (RTX_INTEGRATED_P (orig))
2225 abort ();
2226 break;
2228 case ASM_OPERANDS:
2229 /* If a single asm insn contains multiple output operands then
2230 it contains multiple ASM_OPERANDS rtx's that share the input
2231 and constraint vecs. We must make sure that the copied insn
2232 continues to share them. */
2233 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2235 copy = rtx_alloc (ASM_OPERANDS);
2236 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2237 PUT_MODE (copy, GET_MODE (orig));
2238 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2239 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2240 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2241 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2242 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2243 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2244 = map->copy_asm_constraints_vector;
2245 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2246 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2247 return copy;
2249 break;
2251 case CALL:
2252 /* This is given special treatment because the first
2253 operand of a CALL is a (MEM ...) which may get
2254 forced into a register for cse. This is undesirable
2255 if function-address cse isn't wanted or if we won't do cse. */
2256 #ifndef NO_FUNCTION_CSE
2257 if (! (optimize && ! flag_no_function_cse))
2258 #endif
2260 rtx copy
2261 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2262 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2263 map, 0));
2265 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2267 return
2268 gen_rtx_CALL (GET_MODE (orig), copy,
2269 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2271 break;
2273 #if 0
2274 /* Must be ifdefed out for loop unrolling to work. */
2275 case RETURN:
2276 abort ();
2277 #endif
2279 case SET:
2280 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2281 Adjust the setting by the offset of the area we made.
2282 If the nonlocal goto is into the current function,
2283 this will result in unnecessarily bad code, but should work. */
2284 if (SET_DEST (orig) == virtual_stack_vars_rtx
2285 || SET_DEST (orig) == virtual_incoming_args_rtx)
2287 /* In case a translation hasn't occurred already, make one now. */
2288 rtx equiv_reg;
2289 rtx equiv_loc;
2290 HOST_WIDE_INT loc_offset;
2292 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2293 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2294 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2295 REGNO (equiv_reg)).rtx;
2296 loc_offset
2297 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2299 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2300 force_operand
2301 (plus_constant
2302 (copy_rtx_and_substitute (SET_SRC (orig),
2303 map, 0),
2304 - loc_offset),
2305 NULL_RTX));
2307 else
2308 return gen_rtx_SET (VOIDmode,
2309 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2310 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2311 break;
2313 case MEM:
2314 if (inlining
2315 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2316 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2318 enum machine_mode const_mode
2319 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2320 rtx constant
2321 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2323 constant = copy_rtx_and_substitute (constant, map, 0);
2325 /* If this was an address of a constant pool entry that itself
2326 had to be placed in the constant pool, it might not be a
2327 valid address. So the recursive call might have turned it
2328 into a register. In that case, it isn't a constant any
2329 more, so return it. This has the potential of changing a
2330 MEM into a REG, but we'll assume that it is safe. */
2331 if (! CONSTANT_P (constant))
2332 return constant;
2334 return validize_mem (force_const_mem (const_mode, constant));
2337 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2338 map, 0));
2339 MEM_COPY_ATTRIBUTES (copy, orig);
2341 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2342 since this may be an indirect reference to a parameter and the
2343 actual may not be readonly. */
2344 if (inlining && !for_lhs)
2345 RTX_UNCHANGING_P (copy) = 0;
2347 /* If inlining, squish aliasing data that references the subroutine's
2348 parameter list, since that's no longer applicable. */
2349 if (inlining && MEM_EXPR (copy)
2350 && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2351 && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2352 set_mem_expr (copy, NULL_TREE);
2354 return copy;
2356 default:
2357 break;
2360 copy = rtx_alloc (code);
2361 PUT_MODE (copy, mode);
2362 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2363 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2364 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2366 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2368 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2370 switch (*format_ptr++)
2372 case '0':
2373 /* Copy this through the wide int field; that's safest. */
2374 X0WINT (copy, i) = X0WINT (orig, i);
2375 break;
2377 case 'e':
2378 XEXP (copy, i)
2379 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2380 break;
2382 case 'u':
2383 /* Change any references to old-insns to point to the
2384 corresponding copied insns. */
2385 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2386 break;
2388 case 'E':
2389 XVEC (copy, i) = XVEC (orig, i);
2390 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2392 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2393 for (j = 0; j < XVECLEN (copy, i); j++)
2394 XVECEXP (copy, i, j)
2395 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2396 map, for_lhs);
2398 break;
2400 case 'w':
2401 XWINT (copy, i) = XWINT (orig, i);
2402 break;
2404 case 'i':
2405 XINT (copy, i) = XINT (orig, i);
2406 break;
2408 case 's':
2409 XSTR (copy, i) = XSTR (orig, i);
2410 break;
2412 case 't':
2413 XTREE (copy, i) = XTREE (orig, i);
2414 break;
2416 default:
2417 abort ();
2421 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2423 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2424 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2425 map->copy_asm_constraints_vector
2426 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2429 return copy;
2432 /* Substitute known constant values into INSN, if that is valid. */
2434 void
2435 try_constants (insn, map)
2436 rtx insn;
2437 struct inline_remap *map;
2439 int i;
2441 map->num_sets = 0;
2443 /* First try just updating addresses, then other things. This is
2444 important when we have something like the store of a constant
2445 into memory and we can update the memory address but the machine
2446 does not support a constant source. */
2447 subst_constants (&PATTERN (insn), insn, map, 1);
2448 apply_change_group ();
2449 subst_constants (&PATTERN (insn), insn, map, 0);
2450 apply_change_group ();
2452 /* Show we don't know the value of anything stored or clobbered. */
2453 note_stores (PATTERN (insn), mark_stores, NULL);
2454 map->last_pc_value = 0;
2455 #ifdef HAVE_cc0
2456 map->last_cc0_value = 0;
2457 #endif
2459 /* Set up any constant equivalences made in this insn. */
2460 for (i = 0; i < map->num_sets; i++)
2462 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2464 int regno = REGNO (map->equiv_sets[i].dest);
2466 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2467 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2468 /* The following clause is a hack to make the case work where GNU C++
2469 reassigns a variable to make cse work right. */
2470 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2471 regno).rtx,
2472 map->equiv_sets[i].equiv))
2473 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2474 map->equiv_sets[i].equiv, map->const_age);
2476 else if (map->equiv_sets[i].dest == pc_rtx)
2477 map->last_pc_value = map->equiv_sets[i].equiv;
2478 #ifdef HAVE_cc0
2479 else if (map->equiv_sets[i].dest == cc0_rtx)
2480 map->last_cc0_value = map->equiv_sets[i].equiv;
2481 #endif
2485 /* Substitute known constants for pseudo regs in the contents of LOC,
2486 which are part of INSN.
2487 If INSN is zero, the substitution should always be done (this is used to
2488 update DECL_RTL).
2489 These changes are taken out by try_constants if the result is not valid.
2491 Note that we are more concerned with determining when the result of a SET
2492 is a constant, for further propagation, than actually inserting constants
2493 into insns; cse will do the latter task better.
2495 This function is also used to adjust address of items previously addressed
2496 via the virtual stack variable or virtual incoming arguments registers.
2498 If MEMONLY is nonzero, only make changes inside a MEM. */
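/* A worked example (illustrative sketch, not from a specific test case):
   if MAP records that (reg 104) is equivalent to (const_int 12), then for

	(set (reg 105) (plus:SI (reg 104) (const_int 4)))

   the source is rewritten to (plus:SI (const_int 12) (const_int 4)) and
   then simplified to (const_int 16); when INSN is nonzero the change is
   kept only if apply_change_group still recognizes the rewritten insn.  */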
2500 static void
2501 subst_constants (loc, insn, map, memonly)
2502 rtx *loc;
2503 rtx insn;
2504 struct inline_remap *map;
2505 int memonly;
2507 rtx x = *loc;
2508 int i, j;
2509 enum rtx_code code;
2510 const char *format_ptr;
2511 int num_changes = num_validated_changes ();
2512 rtx new = 0;
2513 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2515 code = GET_CODE (x);
2517 switch (code)
2519 case PC:
2520 case CONST_INT:
2521 case CONST_DOUBLE:
2522 case CONST_VECTOR:
2523 case SYMBOL_REF:
2524 case CONST:
2525 case LABEL_REF:
2526 case ADDRESS:
2527 return;
2529 #ifdef HAVE_cc0
2530 case CC0:
2531 if (! memonly)
2532 validate_change (insn, loc, map->last_cc0_value, 1);
2533 return;
2534 #endif
2536 case USE:
2537 case CLOBBER:
2538 /* The only thing we can do with a USE or CLOBBER is possibly do
2539 some substitutions in a MEM within it. */
2540 if (GET_CODE (XEXP (x, 0)) == MEM)
2541 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2542 return;
2544 case REG:
2545 /* Substitute for parms and known constants. Don't replace
2546 hard regs used as user variables with constants. */
2547 if (! memonly)
2549 int regno = REGNO (x);
2550 struct const_equiv_data *p;
2552 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2553 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2554 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2555 p->rtx != 0)
2556 && p->age >= map->const_age)
2557 validate_change (insn, loc, p->rtx, 1);
2559 return;
2561 case SUBREG:
2562 /* SUBREG applied to something other than a reg
2563 should be treated as ordinary, since that must
2564 be a special hack and we don't know how to treat it specially.
2565 Consider for example mulsidi3 in m68k.md.
2566 Ordinary SUBREG of a REG needs this special treatment. */
2567 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2569 rtx inner = SUBREG_REG (x);
2570 rtx new = 0;
2572 /* We can't call subst_constants on &SUBREG_REG (x) because any
2573 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2574 see what is inside, try to form the new SUBREG and see if that is
2575 valid. We handle two cases: extracting a full word in an
2576 integral mode and extracting the low part. */
2577 subst_constants (&inner, NULL_RTX, map, 0);
2578 new = simplify_gen_subreg (GET_MODE (x), inner,
2579 GET_MODE (SUBREG_REG (x)),
2580 SUBREG_BYTE (x));
2582 if (new)
2583 validate_change (insn, loc, new, 1);
2584 else
2585 cancel_changes (num_changes);
2587 return;
2589 break;
2591 case MEM:
2592 subst_constants (&XEXP (x, 0), insn, map, 0);
2594 /* If a memory address got spoiled, change it back. */
2595 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2596 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2597 cancel_changes (num_changes);
2598 return;
2600 case SET:
2602 /* Substitute constants in our source, and in any arguments to a
2603 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2604 itself. */
2605 rtx *dest_loc = &SET_DEST (x);
2606 rtx dest = *dest_loc;
2607 rtx src, tem;
2608 enum machine_mode compare_mode = VOIDmode;
2610 /* If SET_SRC is a COMPARE which subst_constants would turn into
2611 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2612 is to be done. */
2613 if (GET_CODE (SET_SRC (x)) == COMPARE)
2615 src = SET_SRC (x);
2616 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2617 #ifdef HAVE_cc0
2618 || dest == cc0_rtx
2619 #endif
2622 compare_mode = GET_MODE (XEXP (src, 0));
2623 if (compare_mode == VOIDmode)
2624 compare_mode = GET_MODE (XEXP (src, 1));
2628 subst_constants (&SET_SRC (x), insn, map, memonly);
2629 src = SET_SRC (x);
2631 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2632 || GET_CODE (*dest_loc) == SUBREG
2633 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2635 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2637 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2638 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2640 dest_loc = &XEXP (*dest_loc, 0);
2643 /* Do substitute in the address of a destination in memory. */
2644 if (GET_CODE (*dest_loc) == MEM)
2645 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2647 /* Check for the case where DEST is a SUBREG, both it and the underlying
2648 register are no larger than one word, and the SUBREG's mode is at least as wide.
2649 In that case, we are really setting the underlying register to the
2650 source converted to the mode of DEST. So indicate that. */
2651 if (GET_CODE (dest) == SUBREG
2652 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2653 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2654 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2655 <= GET_MODE_SIZE (GET_MODE (dest)))
2656 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2657 src)))
2658 src = tem, dest = SUBREG_REG (dest);
2660 /* If storing a recognizable value, save it for later recording. */
2661 if ((map->num_sets < MAX_RECOG_OPERANDS)
2662 && (CONSTANT_P (src)
2663 || (GET_CODE (src) == REG
2664 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2665 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2666 || (GET_CODE (src) == PLUS
2667 && GET_CODE (XEXP (src, 0)) == REG
2668 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2669 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2670 && CONSTANT_P (XEXP (src, 1)))
2671 || GET_CODE (src) == COMPARE
2672 #ifdef HAVE_cc0
2673 || dest == cc0_rtx
2674 #endif
2675 || (dest == pc_rtx
2676 && (src == pc_rtx || GET_CODE (src) == RETURN
2677 || GET_CODE (src) == LABEL_REF))))
2679 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2680 it will cause us to save the COMPARE with any constants
2681 substituted, which is what we want for later. */
2682 rtx src_copy = copy_rtx (src);
2683 map->equiv_sets[map->num_sets].equiv = src_copy;
2684 map->equiv_sets[map->num_sets++].dest = dest;
2685 if (compare_mode != VOIDmode
2686 && GET_CODE (src) == COMPARE
2687 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2688 #ifdef HAVE_cc0
2689 || dest == cc0_rtx
2690 #endif
2692 && GET_MODE (XEXP (src, 0)) == VOIDmode
2693 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2695 map->compare_src = src_copy;
2696 map->compare_mode = compare_mode;
2700 return;
2702 default:
2703 break;
2706 format_ptr = GET_RTX_FORMAT (code);
2708 /* If the first operand is an expression, save its mode for later. */
2709 if (*format_ptr == 'e')
2710 op0_mode = GET_MODE (XEXP (x, 0));
2712 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2714 switch (*format_ptr++)
2716 case '0':
2717 break;
2719 case 'e':
2720 if (XEXP (x, i))
2721 subst_constants (&XEXP (x, i), insn, map, memonly);
2722 break;
2724 case 'u':
2725 case 'i':
2726 case 's':
2727 case 'w':
2728 case 'n':
2729 case 't':
2730 case 'B':
2731 break;
2733 case 'E':
2734 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2735 for (j = 0; j < XVECLEN (x, i); j++)
2736 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2738 break;
2740 default:
2741 abort ();
2745 /* If this is a commutative operation, move a constant to the second
2746 operand unless the second operand is already a CONST_INT. */
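      /* For example (illustrative), (plus:SI (const_int 4) (reg 105))
	 becomes (plus:SI (reg 105) (const_int 4)), the canonical order
	 expected by the simplifiers and by insn recognition.  */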
2747 if (! memonly
2748 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2749 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2751 rtx tem = XEXP (x, 0);
2752 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2753 validate_change (insn, &XEXP (x, 1), tem, 1);
2756 /* Simplify the expression in case we put in some constants. */
2757 if (! memonly)
2758 switch (GET_RTX_CLASS (code))
2760 case '1':
2761 if (op0_mode == MAX_MACHINE_MODE)
2762 abort ();
2763 new = simplify_unary_operation (code, GET_MODE (x),
2764 XEXP (x, 0), op0_mode);
2765 break;
2767 case '<':
2769 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2771 if (op_mode == VOIDmode)
2772 op_mode = GET_MODE (XEXP (x, 1));
2773 new = simplify_relational_operation (code, op_mode,
2774 XEXP (x, 0), XEXP (x, 1));
2775 #ifdef FLOAT_STORE_FLAG_VALUE
2776 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2778 enum machine_mode mode = GET_MODE (x);
2779 if (new == const0_rtx)
2780 new = CONST0_RTX (mode);
2781 else
2783 REAL_VALUE_TYPE val;
2785 /* Avoid automatic aggregate initialization. */
2786 val = FLOAT_STORE_FLAG_VALUE (mode);
2787 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2790 #endif
2791 break;
2794 case '2':
2795 case 'c':
2796 new = simplify_binary_operation (code, GET_MODE (x),
2797 XEXP (x, 0), XEXP (x, 1));
2798 break;
2800 case 'b':
2801 case '3':
2802 if (op0_mode == MAX_MACHINE_MODE)
2803 abort ();
2805 if (code == IF_THEN_ELSE)
2807 rtx op0 = XEXP (x, 0);
2809 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2810 && GET_MODE (op0) == VOIDmode
2811 && ! side_effects_p (op0)
2812 && XEXP (op0, 0) == map->compare_src
2813 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2815 /* We have a compare of two VOIDmode constants for which
2816 we recorded the comparison mode. */
2817 rtx temp =
2818 simplify_relational_operation (GET_CODE (op0),
2819 map->compare_mode,
2820 XEXP (op0, 0),
2821 XEXP (op0, 1));
2823 if (temp == const0_rtx)
2824 new = XEXP (x, 2);
2825 else if (temp == const1_rtx)
2826 new = XEXP (x, 1);
2829 if (!new)
2830 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2831 XEXP (x, 0), XEXP (x, 1),
2832 XEXP (x, 2));
2833 break;
2836 if (new)
2837 validate_change (insn, loc, new, 1);
2840 /* Show that registers modified no longer contain known constants. We are
2841 called from note_stores with parts of the new insn. */
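/* For instance (a sketch): after copying an insn that does
   (set (reg 117) (...)), any constant equivalence previously recorded for
   register 117 (and, for a multi-word hard register, for each register it
   occupies) is forgotten here.  */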
2843 static void
2844 mark_stores (dest, x, data)
2845 rtx dest;
2846 rtx x ATTRIBUTE_UNUSED;
2847 void *data ATTRIBUTE_UNUSED;
2849 int regno = -1;
2850 enum machine_mode mode = VOIDmode;
2852 /* DEST is always the innermost thing set, except in the case of
2853 SUBREGs of hard registers. */
2855 if (GET_CODE (dest) == REG)
2856 regno = REGNO (dest), mode = GET_MODE (dest);
2857 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2859 regno = REGNO (SUBREG_REG (dest));
2860 if (regno < FIRST_PSEUDO_REGISTER)
2861 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2862 GET_MODE (SUBREG_REG (dest)),
2863 SUBREG_BYTE (dest),
2864 GET_MODE (dest));
2865 mode = GET_MODE (SUBREG_REG (dest));
2868 if (regno >= 0)
2870 unsigned int uregno = regno;
2871 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2872 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2873 unsigned int i;
2875 /* Ignore virtual stack var or virtual arg register since those
2876 are handled separately. */
2877 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2878 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2879 for (i = uregno; i <= last_reg; i++)
2880 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2881 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2885 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2886 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2887 that it points to the node itself, thus indicating that the node is its
2888 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2889 the given node is NULL, recursively descend the decl/block tree which
2890 it is the root of, and for each other ..._DECL or BLOCK node contained
2891 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2892 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2893 values to point to themselves. */
2895 static void
2896 set_block_origin_self (stmt)
2897 tree stmt;
2899 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2901 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2904 tree local_decl;
2906 for (local_decl = BLOCK_VARS (stmt);
2907 local_decl != NULL_TREE;
2908 local_decl = TREE_CHAIN (local_decl))
2909 set_decl_origin_self (local_decl); /* Potential recursion. */
2913 tree subblock;
2915 for (subblock = BLOCK_SUBBLOCKS (stmt);
2916 subblock != NULL_TREE;
2917 subblock = BLOCK_CHAIN (subblock))
2918 set_block_origin_self (subblock); /* Recurse. */
2923 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2924 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2925 node so that it points to the node itself, thus indicating that the
2926 node represents its own (abstract) origin. Additionally, if the
2927 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2928 the decl/block tree of which the given node is the root, and for
2929 each other ..._DECL or BLOCK node contained therein whose
2930 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2931 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2932 point to themselves. */
2934 void
2935 set_decl_origin_self (decl)
2936 tree decl;
2938 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2940 DECL_ABSTRACT_ORIGIN (decl) = decl;
2941 if (TREE_CODE (decl) == FUNCTION_DECL)
2943 tree arg;
2945 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2946 DECL_ABSTRACT_ORIGIN (arg) = arg;
2947 if (DECL_INITIAL (decl) != NULL_TREE
2948 && DECL_INITIAL (decl) != error_mark_node)
2949 set_block_origin_self (DECL_INITIAL (decl));
2954 /* Given a pointer to some BLOCK node, and a boolean value to set the
2955 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2956 the given block, and for all local decls and all local sub-blocks
2957 (recursively) which are contained therein. */
2959 static void
2960 set_block_abstract_flags (stmt, setting)
2961 tree stmt;
2962 int setting;
2964 tree local_decl;
2965 tree subblock;
2967 BLOCK_ABSTRACT (stmt) = setting;
2969 for (local_decl = BLOCK_VARS (stmt);
2970 local_decl != NULL_TREE;
2971 local_decl = TREE_CHAIN (local_decl))
2972 set_decl_abstract_flags (local_decl, setting);
2974 for (subblock = BLOCK_SUBBLOCKS (stmt);
2975 subblock != NULL_TREE;
2976 subblock = BLOCK_CHAIN (subblock))
2977 set_block_abstract_flags (subblock, setting);
2980 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2981 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2982 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2983 set the abstract flags for all of the parameters, local vars, local
2984 blocks and sub-blocks (recursively) to the same setting. */
2986 void
2987 set_decl_abstract_flags (decl, setting)
2988 tree decl;
2989 int setting;
2991 DECL_ABSTRACT (decl) = setting;
2992 if (TREE_CODE (decl) == FUNCTION_DECL)
2994 tree arg;
2996 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2997 DECL_ABSTRACT (arg) = setting;
2998 if (DECL_INITIAL (decl) != NULL_TREE
2999 && DECL_INITIAL (decl) != error_mark_node)
3000 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3004 /* Output the assembly language code for the function FNDECL
3005 from its DECL_SAVED_INSNS. Used for inline functions that are output
3006 at the end of compilation instead of where they appeared in the source. */
3008 static GTY(()) struct function *old_cfun;
3010 void
3011 output_inline_function (fndecl)
3012 tree fndecl;
3014 enum debug_info_type old_write_symbols = write_symbols;
3015 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
3016 struct function *f = DECL_SAVED_INSNS (fndecl);
3018 old_cfun = cfun;
3019 cfun = f;
3020 current_function_decl = fndecl;
3022 set_new_last_label_num (f->inl_max_label_num);
3024 /* We're not deferring this any longer. */
3025 DECL_DEFER_OUTPUT (fndecl) = 0;
3027 /* If requested, suppress debugging information. */
3028 if (f->no_debugging_symbols)
3030 write_symbols = NO_DEBUG;
3031 debug_hooks = &do_nothing_debug_hooks;
3034 /* Compile this function all the way down to assembly code. As a
3035 side effect this destroys the saved RTL representation, but
3036 that's okay, because we don't need to inline this anymore. */
3037 rest_of_compilation (fndecl);
3038 DECL_INLINE (fndecl) = 0;
3040 cfun = old_cfun;
3041 current_function_decl = old_cfun ? old_cfun->decl : 0;
3042 write_symbols = old_write_symbols;
3043 debug_hooks = old_debug_hooks;
3047 /* Functions to keep track of the values hard regs had at the start of
3048 the function. */
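/* A usage sketch (hypothetical caller, not part of this file): code that
   needs the value a hard register had on entry can obtain a pseudo with

	rtx entry_val = get_hard_reg_initial_val (Pmode, SOME_HARD_REGNO);

   (SOME_HARD_REGNO is a placeholder); emit_initial_value_sets () later
   emits the moves from the hard registers into those pseudos at the
   start of the function.  */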
3051 get_hard_reg_initial_reg (fun, reg)
3052 struct function *fun;
3053 rtx reg;
3055 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3056 int i;
3058 if (ivs == 0)
3059 return NULL_RTX;
3061 for (i = 0; i < ivs->num_entries; i++)
3062 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3063 return ivs->entries[i].hard_reg;
3065 return NULL_RTX;
3069 has_func_hard_reg_initial_val (fun, reg)
3070 struct function *fun;
3071 rtx reg;
3073 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3074 int i;
3076 if (ivs == 0)
3077 return NULL_RTX;
3079 for (i = 0; i < ivs->num_entries; i++)
3080 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3081 return ivs->entries[i].pseudo;
3083 return NULL_RTX;
3087 get_func_hard_reg_initial_val (fun, reg)
3088 struct function *fun;
3089 rtx reg;
3091 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3092 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3094 if (rv)
3095 return rv;
3097 if (ivs == 0)
3099 fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
3100 ivs = fun->hard_reg_initial_vals;
3101 ivs->num_entries = 0;
3102 ivs->max_entries = 5;
3103 ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
3106 if (ivs->num_entries >= ivs->max_entries)
3108 ivs->max_entries += 5;
3109 ivs->entries =
3110 (initial_value_pair *) ggc_realloc (ivs->entries,
3111 ivs->max_entries
3112 * sizeof (initial_value_pair));
3115 ivs->entries[ivs->num_entries].hard_reg = reg;
3116 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3118 return ivs->entries[ivs->num_entries++].pseudo;
3122 get_hard_reg_initial_val (mode, regno)
3123 enum machine_mode mode;
3124 int regno;
3126 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3130 has_hard_reg_initial_val (mode, regno)
3131 enum machine_mode mode;
3132 int regno;
3134 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3137 static void
3138 setup_initial_hard_reg_value_integration (inl_f, remap)
3139 struct function *inl_f;
3140 struct inline_remap *remap;
3142 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3143 int i;
3145 if (ivs == 0)
3146 return;
3148 for (i = 0; i < ivs->num_entries; i ++)
3149 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3150 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3154 void
3155 emit_initial_value_sets ()
3157 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3158 int i;
3159 rtx seq;
3161 if (ivs == 0)
3162 return;
3164 start_sequence ();
3165 for (i = 0; i < ivs->num_entries; i++)
3166 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3167 seq = get_insns ();
3168 end_sequence ();
3170 emit_insn_after (seq, get_insns ());
3173 /* If the backend knows where to allocate pseudos for hard
3174 register initial values, register these allocations now. */
3175 void
3176 allocate_initial_values (reg_equiv_memory_loc)
3177 rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3179 #ifdef ALLOCATE_INITIAL_VALUE
3180 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3181 int i;
3183 if (ivs == 0)
3184 return;
3186 for (i = 0; i < ivs->num_entries; i++)
3188 int regno = REGNO (ivs->entries[i].pseudo);
3189 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3191 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3192 ; /* Do nothing. */
3193 else if (GET_CODE (x) == MEM)
3194 reg_equiv_memory_loc[regno] = x;
3195 else if (GET_CODE (x) == REG)
3197 reg_renumber[regno] = REGNO (x);
3198 /* Poke the regno right into regno_reg_rtx
3199 so that even fixed regs are accepted. */
3200 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3202 else abort ();
3204 #endif
3207 #include "gt-integrate.h"