1 /* Procedure integration for GCC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "debug.h"
32 #include "insn-config.h"
33 #include "expr.h"
34 #include "output.h"
35 #include "recog.h"
36 #include "integrate.h"
37 #include "real.h"
38 #include "except.h"
39 #include "function.h"
40 #include "toplev.h"
41 #include "intl.h"
42 #include "loop.h"
43 #include "params.h"
44 #include "ggc.h"
45 #include "target.h"
46 #include "langhooks.h"
48 #include "obstack.h"
49 #define obstack_chunk_alloc xmalloc
50 #define obstack_chunk_free free
52 extern struct obstack *function_maybepermanent_obstack;
54 /* Round VALUE up to the next highest integer that meets the
55 alignment. */
56 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
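/* Worked example (illustrative, not in the original file): with ALIGN a
   power of two, CEIL_ROUND (13, 8) computes (13 + 7) & ~7 = 16, while an
   already aligned value is unchanged: CEIL_ROUND (16, 8) = 16.  */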
58 /* Default max number of insns a function can have and still be inline.
59 This is overridden on RISC machines. */
60 #ifndef INTEGRATE_THRESHOLD
61 /* Inlining small functions might save more space than not inlining at
62 all. Assume 1 instruction for the call and 1.5 insns per argument. */
63 #define INTEGRATE_THRESHOLD(DECL) \
64 (optimize_size \
65 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
66 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
67 #endif
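/* Worked example (illustrative): for a function with two arguments,
   INTEGRATE_THRESHOLD allows 1 + (3 * 2) / 2 = 4 insns when optimizing
   for size, and 8 * (8 + 2) = 80 insns otherwise.  */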
70 /* Private type used by {get/has}_func_hard_reg_initial_val. */
71 typedef struct initial_value_pair GTY(()) {
72 rtx hard_reg;
73 rtx pseudo;
74 } initial_value_pair;
75 typedef struct initial_value_struct GTY(()) {
76 int num_entries;
77 int max_entries;
78 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
79 } initial_value_struct;
81 static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));
83 static rtvec initialize_for_inline PARAMS ((tree));
84 static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
85 static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
86 rtvec));
87 static tree integrate_decl_tree PARAMS ((tree,
88 struct inline_remap *));
89 static void subst_constants PARAMS ((rtx *, rtx,
90 struct inline_remap *, int));
91 static void set_block_origin_self PARAMS ((tree));
92 static void set_block_abstract_flags PARAMS ((tree, int));
93 static void process_reg_param PARAMS ((struct inline_remap *, rtx,
94 rtx));
95 void set_decl_abstract_flags PARAMS ((tree, int));
96 static void mark_stores PARAMS ((rtx, rtx, void *));
97 static void save_parm_insns PARAMS ((rtx, rtx));
98 static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
99 rtx));
100 static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
101 int));
102 static int compare_blocks PARAMS ((const PTR, const PTR));
103 static int find_block PARAMS ((const PTR, const PTR));
105 /* Used by copy_rtx_and_substitute; this indicates whether the function is
106 called for the purpose of inlining or some other purpose (e.g. loop
107 unrolling). This affects how constant pool references are handled.
108 This variable contains the struct function for the function being inlined. */
109 static struct function *inlining = 0;
111 /* Returns the Ith entry in the label_map contained in MAP. If the
112 Ith entry has not yet been set, return a fresh label. This function
113 performs a lazy initialization of label_map, thereby avoiding huge memory
114 explosions when the label_map gets very large. */
116 rtx
117 get_label_from_map (map, i)
118 struct inline_remap *map;
119 int i;
121 rtx x = map->label_map[i];
123 if (x == NULL_RTX)
124 x = map->label_map[i] = gen_label_rtx ();
126 return x;
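/* Usage sketch (illustrative, not part of the original file): labels are
   requested by their index in the inlined function and are only
   materialized on first use, e.g.

     rtx lab = get_label_from_map (map, CODE_LABEL_NUMBER (insn));
     emit_label (lab);  */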
129 /* Return false if the function FNDECL cannot be inlined on account of its
130 attributes, true otherwise. */
131 bool
132 function_attribute_inlinable_p (fndecl)
133 tree fndecl;
135 if (targetm.attribute_table)
137 tree a;
139 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
141 tree name = TREE_PURPOSE (a);
142 int i;
144 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
145 if (is_attribute_p (targetm.attribute_table[i].name, name))
146 return (*targetm.function_attribute_inlinable_p) (fndecl);
150 return true;
153 /* Return NULL if the current function (whose FUNCTION_DECL is FNDECL)
154 is safe and reasonable to integrate into other functions.
155 Otherwise return a warning msgid with a single %s
156 for the function's name. */
158 const char *
159 function_cannot_inline_p (fndecl)
160 tree fndecl;
162 rtx insn;
163 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
165 /* For functions marked as inline increase the maximum size to
166 MAX_INLINE_INSNS (-finline-limit-<n>). For regular functions
167 use the limit given by INTEGRATE_THRESHOLD. */
169 int max_insns = (DECL_INLINE (fndecl))
170 ? (MAX_INLINE_INSNS
171 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
172 : INTEGRATE_THRESHOLD (fndecl);
174 int ninsns = 0;
175 tree parms;
177 if (DECL_UNINLINABLE (fndecl))
178 return N_("function cannot be inline");
180 /* No inlines with varargs. */
181 if (last && TREE_VALUE (last) != void_type_node)
182 return N_("varargs function cannot be inline");
184 if (current_function_calls_alloca)
185 return N_("function using alloca cannot be inline");
187 if (current_function_calls_setjmp)
188 return N_("function using setjmp cannot be inline");
190 if (current_function_calls_eh_return)
191 return N_("function uses __builtin_eh_return");
193 if (current_function_contains_functions)
194 return N_("function with nested functions cannot be inline");
196 if (forced_labels)
197 return
198 N_("function with label addresses used in initializers cannot inline");
200 if (current_function_cannot_inline)
201 return current_function_cannot_inline;
203 /* If it's not even close, don't even look. */
204 if (get_max_uid () > 3 * max_insns)
205 return N_("function too large to be inline");
207 #if 0
208 /* Don't inline functions which do not specify a function prototype and
209 have BLKmode argument or take the address of a parameter. */
210 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
212 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
213 TREE_ADDRESSABLE (parms) = 1;
214 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
215 return N_("no prototype, and parameter address used; cannot be inline");
217 #endif
219 /* We can't inline functions that return structures
220 the old-fashioned PCC way, copying into a static block. */
221 if (current_function_returns_pcc_struct)
222 return N_("inline functions not supported for this return value type");
224 /* We can't inline functions that return structures of varying size. */
225 if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
226 && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
227 return N_("function with varying-size return value cannot be inline");
229 /* Cannot inline a function with a varying size argument or one that
230 receives a transparent union. */
231 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
233 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
234 return N_("function with varying-size parameter cannot be inline");
235 else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
236 && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
237 return N_("function with transparent unit parameter cannot be inline");
240 if (get_max_uid () > max_insns)
242 for (ninsns = 0, insn = get_first_nonparm_insn ();
243 insn && ninsns < max_insns;
244 insn = NEXT_INSN (insn))
245 if (INSN_P (insn))
246 ninsns++;
248 if (ninsns >= max_insns)
249 return N_("function too large to be inline");
252 /* We will not inline a function which uses computed goto. The addresses of
253 its local labels, which may be tucked into global storage, are of course
254 not constant across instantiations, which causes unexpected behaviour. */
255 if (current_function_has_computed_jump)
256 return N_("function with computed jump cannot inline");
258 /* We cannot inline a nested function that jumps to a nonlocal label. */
259 if (current_function_has_nonlocal_goto)
260 return N_("function with nonlocal goto cannot be inline");
262 /* We can't inline functions that return a PARALLEL rtx. */
263 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
265 rtx result = DECL_RTL (DECL_RESULT (fndecl));
266 if (GET_CODE (result) == PARALLEL)
267 return N_("inline functions not supported for this return value type");
270 /* If the function has a target specific attribute attached to it,
271 then we assume that we should not inline it. This can be overridden
272 by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P. */
273 if (!function_attribute_inlinable_p (fndecl))
274 return N_("function with target specific attribute(s) cannot be inlined");
276 return NULL;
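/* Usage sketch (illustrative): a caller typically warns with the returned
   msgid and marks the function uninlinable.  The diagnostic routine shown
   is an assumption; any %s-style warning works:

     const char *msg = function_cannot_inline_p (fndecl);
     if (msg)
       {
         warning_with_decl (fndecl, msg);
         DECL_UNINLINABLE (fndecl) = 1;
       }  */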
279 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
280 Zero for a reg that isn't a parm's home.
281 Only reg numbers less than max_parm_reg are mapped here. */
282 static tree *parmdecl_map;
284 /* In save_for_inline, nonzero if past the parm-initialization insns. */
285 static int in_nonparm_insns;
287 /* Subroutine for `save_for_inline'. Performs initialization
288 needed to save FNDECL's insns and info for future inline expansion. */
290 static rtvec
291 initialize_for_inline (fndecl)
292 tree fndecl;
294 int i;
295 rtvec arg_vector;
296 tree parms;
298 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
299 memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
300 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
302 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
303 parms;
304 parms = TREE_CHAIN (parms), i++)
306 rtx p = DECL_RTL (parms);
308 /* If we have (mem (addressof (mem ...))), use the inner MEM since
309 otherwise the copy_rtx call below will not unshare the MEM, as
310 it shares ADDRESSOF. */
311 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
312 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
313 p = XEXP (XEXP (p, 0), 0);
315 RTVEC_ELT (arg_vector, i) = p;
317 if (GET_CODE (p) == REG)
318 parmdecl_map[REGNO (p)] = parms;
319 else if (GET_CODE (p) == CONCAT)
321 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
322 rtx pimag = gen_imagpart (GET_MODE (preal), p);
324 if (GET_CODE (preal) == REG)
325 parmdecl_map[REGNO (preal)] = parms;
326 if (GET_CODE (pimag) == REG)
327 parmdecl_map[REGNO (pimag)] = parms;
330 /* This flag is cleared later
331 if the function ever modifies the value of the parm. */
332 TREE_READONLY (parms) = 1;
335 return arg_vector;
338 /* Copy NODE (which must be a DECL; a PARM_DECL is copied as a VAR_DECL). The DECL
339 originally was in the FROM_FN, but now it will be in the
340 TO_FN. */
342 tree
343 copy_decl_for_inlining (decl, from_fn, to_fn)
344 tree decl;
345 tree from_fn;
346 tree to_fn;
348 tree copy;
350 /* Copy the declaration. */
351 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
353 /* For a parameter, we must make an equivalent VAR_DECL, not a
354 new PARM_DECL. */
355 copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
356 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
357 TREE_READONLY (copy) = TREE_READONLY (decl);
358 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
360 else
362 copy = copy_node (decl);
363 (*lang_hooks.dup_lang_specific_decl) (copy);
365 /* TREE_ADDRESSABLE isn't used to indicate that a label's
366 address has been taken; it's for internal bookkeeping in
367 expand_goto_internal. */
368 if (TREE_CODE (copy) == LABEL_DECL)
369 TREE_ADDRESSABLE (copy) = 0;
372 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
373 declaration inspired this copy. */
374 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
376 /* The new variable/label has no RTL, yet. */
377 SET_DECL_RTL (copy, NULL_RTX);
379 /* These args would always appear unused, if not for this. */
380 TREE_USED (copy) = 1;
382 /* Set the context for the new declaration. */
383 if (!DECL_CONTEXT (decl))
384 /* Globals stay global. */
386 else if (DECL_CONTEXT (decl) != from_fn)
387 /* Things that weren't in the scope of the function we're inlining
388 from aren't in the scope we're inlining to, either. */
390 else if (TREE_STATIC (decl))
391 /* Function-scoped static variables should stay in the original
392 function. */
394 else
395 /* Ordinary automatic local variables are now in the scope of the
396 new function. */
397 DECL_CONTEXT (copy) = to_fn;
399 return copy;
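/* Usage sketch (illustrative): the copy is always made into the function
   currently being expanded, as done later in this file:

     tree d = copy_decl_for_inlining (t, map->fndecl,
                                      current_function_decl);  */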
402 /* Make the insns and PARM_DECLs of the current function permanent
403 and record other information in DECL_SAVED_INSNS to allow inlining
404 of this function in subsequent calls.
406 This routine need not copy any insns because we are not going
407 to immediately compile the insns in the insn chain. There
408 are two cases when we would compile the insns for FNDECL:
409 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
410 be output at the end of other compilation, because somebody took
411 its address. In the first case, the insns of FNDECL are copied
412 as it is expanded inline, so FNDECL's saved insns are not
413 modified. In the second case, FNDECL is used for the last time,
414 so modifying the rtl is not a problem.
416 We don't have to worry about FNDECL being inline expanded by
417 other functions which are written at the end of compilation
418 because flag_no_inline is turned on when we begin writing
419 functions at the end of compilation. */
421 void
422 save_for_inline (fndecl)
423 tree fndecl;
425 rtx insn;
426 rtvec argvec;
427 rtx first_nonparm_insn;
429 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
430 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
431 Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
432 for the parms, prior to elimination of virtual registers.
433 These values are needed for substituting parms properly. */
434 if (! flag_no_inline)
435 parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
437 /* Make and emit a return-label if we have not already done so. */
439 if (return_label == 0)
441 return_label = gen_label_rtx ();
442 emit_label (return_label);
445 if (! flag_no_inline)
446 argvec = initialize_for_inline (fndecl);
447 else
448 argvec = NULL;
450 /* Delete basic block notes created by an early run of find_basic_blocks.
451 The notes would be later used by find_basic_blocks to reuse the memory
452 for basic_block structures on an already freed obstack. */
453 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
454 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
455 delete_related_insns (insn);
457 /* If there are insns that copy parms from the stack into pseudo registers,
458 those insns are not copied. `expand_inline_function' must
459 emit the correct code to handle such things. */
461 insn = get_insns ();
462 if (GET_CODE (insn) != NOTE)
463 abort ();
465 if (! flag_no_inline)
467 /* Get the insn which signals the end of parameter setup code. */
468 first_nonparm_insn = get_first_nonparm_insn ();
470 /* Now just scan the chain of insns to see what happens to our
471 PARM_DECLs. If a PARM_DECL is used but never modified, we
472 can substitute its rtl directly when expanding inline (and
473 perform constant folding when its incoming value is
474 constant). Otherwise, we have to copy its value into a new
475 register and track the new register's life. */
476 in_nonparm_insns = 0;
477 save_parm_insns (insn, first_nonparm_insn);
479 cfun->inl_max_label_num = max_label_num ();
480 cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
481 cfun->original_arg_vector = argvec;
483 cfun->original_decl_initial = DECL_INITIAL (fndecl);
484 cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
485 DECL_SAVED_INSNS (fndecl) = cfun;
487 /* Clean up. */
488 if (! flag_no_inline)
489 free (parmdecl_map);
492 /* Scan the chain of insns to see what happens to our PARM_DECLs. If a
493 PARM_DECL is used but never modified, we can substitute its rtl directly
494 when expanding inline (and perform constant folding when its incoming
495 value is constant). Otherwise, we have to copy its value into a new
496 register and track the new register's life. */
498 static void
499 save_parm_insns (insn, first_nonparm_insn)
500 rtx insn;
501 rtx first_nonparm_insn;
503 if (insn == NULL_RTX)
504 return;
506 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
508 if (insn == first_nonparm_insn)
509 in_nonparm_insns = 1;
511 if (INSN_P (insn))
513 /* Record what interesting things happen to our parameters. */
514 note_stores (PATTERN (insn), note_modified_parmregs, NULL);
516 /* If this is a CALL_PLACEHOLDER insn then we need to look into the
517 three attached sequences: normal call, sibling call and tail
518 recursion. */
519 if (GET_CODE (insn) == CALL_INSN
520 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
522 int i;
524 for (i = 0; i < 3; i++)
525 save_parm_insns (XEXP (PATTERN (insn), i),
526 first_nonparm_insn);
532 /* Note whether a parameter is modified or not. */
534 static void
535 note_modified_parmregs (reg, x, data)
536 rtx reg;
537 rtx x ATTRIBUTE_UNUSED;
538 void *data ATTRIBUTE_UNUSED;
540 if (GET_CODE (reg) == REG && in_nonparm_insns
541 && REGNO (reg) < max_parm_reg
542 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
543 && parmdecl_map[REGNO (reg)] != 0)
544 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
547 /* Unfortunately, we need a global copy of const_equiv map for communication
548 with a function called from note_stores. Be *very* careful that this
549 is used properly in the presence of recursion. */
551 varray_type global_const_equiv_varray;
553 #define FIXED_BASE_PLUS_P(X) \
554 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
555 && GET_CODE (XEXP (X, 0)) == REG \
556 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
557 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
559 /* Called to set up a mapping for the case where a parameter is in a
560 register. If it is read-only and our argument is a constant, set up the
561 constant equivalence.
563 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
564 if it is a register.
566 Also, don't allow hard registers here; they might not be valid when
567 substituted into insns. */
568 static void
569 process_reg_param (map, loc, copy)
570 struct inline_remap *map;
571 rtx loc, copy;
573 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
574 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
575 && ! REG_USERVAR_P (copy))
576 || (GET_CODE (copy) == REG
577 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
579 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
580 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
581 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
582 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
583 copy = temp;
585 map->reg_map[REGNO (loc)] = copy;
588 /* Compare two BLOCKs for qsort. The key we sort on is the
589 BLOCK_ABSTRACT_ORIGIN of the blocks. We cannot just subtract the
590 two pointers, because the difference may overflow an int. */
592 static int
593 compare_blocks (v1, v2)
594 const PTR v1;
595 const PTR v2;
597 tree b1 = *((const tree *) v1);
598 tree b2 = *((const tree *) v2);
599 char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
600 char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
602 if (p1 == p2)
603 return 0;
604 return p1 < p2 ? -1 : 1;
607 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
608 an original block; the second to a remapped equivalent. */
610 static int
611 find_block (v1, v2)
612 const PTR v1;
613 const PTR v2;
615 const union tree_node *b1 = (const union tree_node *) v1;
616 tree b2 = *((const tree *) v2);
617 char *p1 = (char *) b1;
618 char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
620 if (p1 == p2)
621 return 0;
622 return p1 < p2 ? -1 : 1;
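/* Sketch (illustrative, not part of the original file): the two
   comparators above work as a pair.  map->block_map is first sorted with
   compare_blocks; the remapped copy of a BLOCK from the inlined function
   can then be looked up with find_block:

     qsort (&VARRAY_TREE (map->block_map, 0),
            map->block_map->elements_used, sizeof (tree),
            compare_blocks);
     tree *slot = (tree *) bsearch (original_block,
                                    &VARRAY_TREE (map->block_map, 0),
                                    map->block_map->elements_used,
                                    sizeof (tree), find_block);

   where original_block is any BLOCK from the function being inlined.  */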
625 /* Integrate the procedure defined by FNDECL. Note that this function
626 may wind up calling itself. Since the static variables are not
627 reentrant, we do not assign them until after the possibility
628 of recursion is eliminated.
630 If IGNORE is nonzero, do not produce a value.
631 Otherwise store the value in TARGET if it is nonzero and that is convenient.
633 Value is:
634 (rtx)-1 if we could not substitute the function
635 0 if we substituted it and it does not produce a value
636 else an rtx for where the value is stored. */
638 rtx
639 expand_inline_function (fndecl, parms, target, ignore, type,
640 structure_value_addr)
641 tree fndecl, parms;
642 rtx target;
643 int ignore;
644 tree type;
645 rtx structure_value_addr;
647 struct function *inlining_previous;
648 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
649 tree formal, actual, block;
650 rtx parm_insns = inl_f->emit->x_first_insn;
651 rtx insns = (inl_f->inl_last_parm_insn
652 ? NEXT_INSN (inl_f->inl_last_parm_insn)
653 : parm_insns);
654 tree *arg_trees;
655 rtx *arg_vals;
656 int max_regno;
657 int i;
658 int min_labelno = inl_f->emit->x_first_label_num;
659 int max_labelno = inl_f->inl_max_label_num;
660 int nargs;
661 rtx loc;
662 rtx stack_save = 0;
663 rtx temp;
664 struct inline_remap *map = 0;
665 rtvec arg_vector = inl_f->original_arg_vector;
666 rtx static_chain_value = 0;
667 int inl_max_uid;
668 int eh_region_offset;
670 /* The pointer used to track the true location of the memory used
671 for MAP->LABEL_MAP. */
672 rtx *real_label_map = 0;
674 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
675 max_regno = inl_f->emit->x_reg_rtx_no + 3;
676 if (max_regno < FIRST_PSEUDO_REGISTER)
677 abort ();
679 /* Pull out the decl for the function definition; fndecl may be a
680 local declaration, which would break DECL_ABSTRACT_ORIGIN. */
681 fndecl = inl_f->decl;
683 nargs = list_length (DECL_ARGUMENTS (fndecl));
685 if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
686 cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
688 /* Check that the parms' types match and that sufficient arguments were
689 passed. Since the appropriate conversions or default promotions have
690 already been applied, the machine modes should match exactly. */
692 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
693 formal;
694 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
696 tree arg;
697 enum machine_mode mode;
699 if (actual == 0)
700 return (rtx) (size_t) -1;
702 arg = TREE_VALUE (actual);
703 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
705 if (arg == error_mark_node
706 || mode != TYPE_MODE (TREE_TYPE (arg))
707 /* If they are block mode, the types should match exactly.
708 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
709 which could happen if the parameter has incomplete type. */
710 || (mode == BLKmode
711 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
712 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
713 return (rtx) (size_t) -1;
716 /* Extra arguments are valid, but will be ignored below, so we must
717 evaluate them here for side-effects. */
718 for (; actual; actual = TREE_CHAIN (actual))
719 expand_expr (TREE_VALUE (actual), const0_rtx,
720 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
722 /* Expand the function arguments. Do this first so that any
723 new registers get created before we allocate the maps. */
725 arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
726 arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
728 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
729 formal;
730 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
732 /* Actual parameter, converted to the type of the argument within the
733 function. */
734 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
735 /* Mode of the variable used within the function. */
736 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
737 int invisiref = 0;
739 arg_trees[i] = arg;
740 loc = RTVEC_ELT (arg_vector, i);
742 /* If this is an object passed by invisible reference, we copy the
743 object into a stack slot and save its address. If this will go
744 into memory, we do nothing now. Otherwise, we just expand the
745 argument. */
746 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
747 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
749 rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);
751 store_expr (arg, stack_slot, 0);
752 arg_vals[i] = XEXP (stack_slot, 0);
753 invisiref = 1;
755 else if (GET_CODE (loc) != MEM)
757 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
759 int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
760 enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));
762 pmode = promote_mode (TREE_TYPE (formal), pmode,
763 &unsignedp, 0);
765 if (GET_MODE (loc) != pmode)
766 abort ();
768 /* The mode of LOC and ARG can differ if LOC was a variable
769 that had its mode promoted via PROMOTE_MODE. */
770 arg_vals[i] = convert_modes (pmode,
771 TYPE_MODE (TREE_TYPE (arg)),
772 expand_expr (arg, NULL_RTX, mode,
773 EXPAND_SUM),
774 unsignedp);
776 else
777 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
779 else
780 arg_vals[i] = 0;
782 if (arg_vals[i] != 0
783 && (! TREE_READONLY (formal)
784 /* If the parameter is not read-only, copy our argument through
785 a register. Also, we cannot use ARG_VALS[I] if it overlaps
786 TARGET in any way. In the inline function, they will likely
787 be two different pseudos, and `safe_from_p' will make all
788 sorts of smart assumptions about their not conflicting.
789 But if ARG_VALS[I] overlaps TARGET, these assumptions are
790 wrong, so put ARG_VALS[I] into a fresh register.
791 Don't worry about invisible references, since their stack
792 temps will never overlap the target. */
793 || (target != 0
794 && ! invisiref
795 && (GET_CODE (arg_vals[i]) == REG
796 || GET_CODE (arg_vals[i]) == SUBREG
797 || GET_CODE (arg_vals[i]) == MEM)
798 && reg_overlap_mentioned_p (arg_vals[i], target))
799 /* ??? We must always copy a SUBREG into a REG, because it might
800 get substituted into an address, and not all ports correctly
801 handle SUBREGs in addresses. */
802 || (GET_CODE (arg_vals[i]) == SUBREG)))
803 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
805 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
806 && POINTER_TYPE_P (TREE_TYPE (formal)))
807 mark_reg_pointer (arg_vals[i],
808 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
811 /* Allocate the structures we use to remap things. */
813 map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
814 map->fndecl = fndecl;
816 VARRAY_TREE_INIT (map->block_map, 10, "block_map");
817 map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
819 /* We used to use alloca here, but the size of what it would try to
820 allocate would occasionally cause it to exceed the stack limit and
821 cause unpredictable core dumps. */
822 real_label_map
823 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
824 map->label_map = real_label_map;
825 map->local_return_label = NULL_RTX;
827 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
828 map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
829 map->min_insnno = 0;
830 map->max_insnno = inl_max_uid;
832 map->integrating = 1;
833 map->compare_src = NULL_RTX;
834 map->compare_mode = VOIDmode;
836 /* const_equiv_varray maps pseudos in our routine to constants, so
837 it needs to be large enough for all our pseudos. This is the
838 number we are currently using plus the number in the called
839 routine, plus 15 for each arg, five to compute the virtual frame
840 pointer, and five for the return value. This should be enough
841 for most cases. We do not reference entries outside the range of
842 the map.
844 ??? These numbers are quite arbitrary and were obtained by
845 experimentation. At some point, we should try to allocate the
846 table after all the parameters are set up so we can more accurately
847 estimate the number of pseudos we will need. */
849 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
850 (max_reg_num ()
851 + (max_regno - FIRST_PSEUDO_REGISTER)
852 + 15 * nargs
853 + 10),
854 "expand_inline_function");
855 map->const_age = 0;
857 /* Record the current insn in case we have to set up pointers to frame
858 and argument memory blocks. If there are no insns yet, add a dummy
859 insn that can be used as an insertion point. */
860 map->insns_at_start = get_last_insn ();
861 if (map->insns_at_start == 0)
862 map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);
864 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
865 map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;
867 /* Update the outgoing argument size to allow for those in the inlined
868 function. */
869 if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
870 current_function_outgoing_args_size = inl_f->outgoing_args_size;
872 /* If the inline function needs to make PIC references, that means
873 that this function's PIC offset table must be used. */
874 if (inl_f->uses_pic_offset_table)
875 current_function_uses_pic_offset_table = 1;
877 /* If this function needs a context, set it up. */
878 if (inl_f->needs_context)
879 static_chain_value = lookup_static_chain (fndecl);
881 if (GET_CODE (parm_insns) == NOTE
882 && NOTE_LINE_NUMBER (parm_insns) > 0)
884 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
885 NOTE_LINE_NUMBER (parm_insns));
886 if (note)
887 RTX_INTEGRATED_P (note) = 1;
890 /* Process each argument. For each, set up things so that the function's
891 reference to the argument will refer to the argument being passed.
892 We only replace REG with REG here. Any simplifications are done
893 via const_equiv_map.
895 We make two passes: In the first, we deal with parameters that will
896 be placed into registers, since we need to ensure that the allocated
897 register number fits in const_equiv_map. Then we store all non-register
898 parameters into their memory location. */
900 /* Don't try to free temp stack slots here, because we may put one of the
901 parameters into a temp stack slot. */
903 for (i = 0; i < nargs; i++)
905 rtx copy = arg_vals[i];
907 loc = RTVEC_ELT (arg_vector, i);
909 /* There are three cases, each handled separately. */
910 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
911 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
913 /* This must be an object passed by invisible reference (it could
914 also be a variable-sized object, but we forbid inlining functions
915 with variable-sized arguments). COPY is the address of the
916 actual value (this computation will cause it to be copied). We
917 map that address for the register, noting the actual address as
918 an equivalent in case it can be substituted into the insns. */
920 if (GET_CODE (copy) != REG)
922 temp = copy_addr_to_reg (copy);
923 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
924 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
925 copy = temp;
927 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
929 else if (GET_CODE (loc) == MEM)
931 /* This is the case of a parameter that lives in memory. It
932 will live in the block we allocate in the called routine's
933 frame that simulates the incoming argument area. Do nothing
934 with the parameter now; we will call store_expr later. In
935 this case, however, we must ensure that the virtual stack and
936 incoming arg rtx values are expanded now so that we can be
937 sure we have enough slots in the const equiv map since the
938 store_expr call can easily blow the size estimate. */
939 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
940 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
942 else if (GET_CODE (loc) == REG)
943 process_reg_param (map, loc, copy);
944 else if (GET_CODE (loc) == CONCAT)
946 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
947 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
948 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
949 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
951 process_reg_param (map, locreal, copyreal);
952 process_reg_param (map, locimag, copyimag);
954 else
955 abort ();
958 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
959 specially. This function can be called recursively, so we need to
960 save the previous value. */
961 inlining_previous = inlining;
962 inlining = inl_f;
964 /* Now do the parameters that will be placed in memory. */
966 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
967 formal; formal = TREE_CHAIN (formal), i++)
969 loc = RTVEC_ELT (arg_vector, i);
971 if (GET_CODE (loc) == MEM
972 /* Exclude case handled above. */
973 && ! (GET_CODE (XEXP (loc, 0)) == REG
974 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
976 rtx note = emit_note (DECL_SOURCE_FILE (formal),
977 DECL_SOURCE_LINE (formal));
978 if (note)
979 RTX_INTEGRATED_P (note) = 1;
981 /* Compute the address in the area we reserved and store the
982 value there. */
983 temp = copy_rtx_and_substitute (loc, map, 1);
984 subst_constants (&temp, NULL_RTX, map, 1);
985 apply_change_group ();
986 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
987 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
988 store_expr (arg_trees[i], temp, 0);
992 /* Deal with the places that the function puts its result.
993 We are driven by what is placed into DECL_RESULT.
995 Initially, we assume that we don't need any special handling for
996 REG_FUNCTION_VALUE_P. */
998 map->inline_target = 0;
999 loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
1000 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
1002 if (TYPE_MODE (type) == VOIDmode)
1003 /* There is no return value to worry about. */
1005 else if (GET_CODE (loc) == MEM)
1007 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
1009 temp = copy_rtx_and_substitute (loc, map, 1);
1010 subst_constants (&temp, NULL_RTX, map, 1);
1011 apply_change_group ();
1012 target = temp;
1014 else
1016 if (! structure_value_addr
1017 || ! aggregate_value_p (DECL_RESULT (fndecl)))
1018 abort ();
1020 /* Pass the function the address in which to return a structure
1021 value. Note that a constructor can cause someone to call us
1022 with STRUCTURE_VALUE_ADDR, but the initialization takes place
1023 via the first parameter, rather than the struct return address.
1025 We have two cases: If the address is a simple register
1026 indirect, use the mapping mechanism to point that register to
1027 our structure return address. Otherwise, store the structure
1028 return value into the place that it will be referenced from. */
1030 if (GET_CODE (XEXP (loc, 0)) == REG)
1032 temp = force_operand (structure_value_addr, NULL_RTX);
1033 temp = force_reg (Pmode, temp);
1034 /* A virtual register might be invalid in an insn, because
1035 it can cause trouble in reload. Since we don't have access
1036 to the expanders at map translation time, make sure we have
1037 a proper register now.
1038 If a virtual register is actually valid, cse or combine
1039 can put it into the mapped insns. */
1040 if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
1041 && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
1042 temp = copy_to_mode_reg (Pmode, temp);
1043 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1045 if (CONSTANT_P (structure_value_addr)
1046 || GET_CODE (structure_value_addr) == ADDRESSOF
1047 || (GET_CODE (structure_value_addr) == PLUS
1048 && (XEXP (structure_value_addr, 0)
1049 == virtual_stack_vars_rtx)
1050 && (GET_CODE (XEXP (structure_value_addr, 1))
1051 == CONST_INT)))
1053 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
1054 CONST_AGE_PARM);
1057 else
1059 temp = copy_rtx_and_substitute (loc, map, 1);
1060 subst_constants (&temp, NULL_RTX, map, 0);
1061 apply_change_group ();
1062 emit_move_insn (temp, structure_value_addr);
1066 else if (ignore)
1067 /* We will ignore the result value, so don't look at its structure.
1068 Note that preparations for an aggregate return value
1069 do need to be made (above) even if it will be ignored. */
1071 else if (GET_CODE (loc) == REG)
1073 /* The function returns an object in a register and we use the return
1074 value. Set up our target for remapping. */
1076 /* Machine mode function was declared to return. */
1077 enum machine_mode departing_mode = TYPE_MODE (type);
1078 /* (Possibly wider) machine mode it actually computes
1079 (for the sake of callers that fail to declare it right).
1080 We have to use the mode of the result's RTL, rather than
1081 its type, since expand_function_start may have promoted it. */
1082 enum machine_mode arriving_mode
1083 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1084 rtx reg_to_map;
1086 /* Don't use MEMs as direct targets because on some machines
1087 substituting a MEM for a REG makes invalid insns.
1088 Let the combiner substitute the MEM if that is valid. */
1089 if (target == 0 || GET_CODE (target) != REG
1090 || GET_MODE (target) != departing_mode)
1092 /* Don't make BLKmode registers. If this looks like
1093 a BLKmode object being returned in a register, get
1094 the mode from that, otherwise abort. */
1095 if (departing_mode == BLKmode)
1097 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1099 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1100 arriving_mode = departing_mode;
1102 else
1103 abort ();
1106 target = gen_reg_rtx (departing_mode);
1109 /* If function's value was promoted before return,
1110 avoid machine mode mismatch when we substitute INLINE_TARGET.
1111 But TARGET is what we will return to the caller. */
1112 if (arriving_mode != departing_mode)
1114 /* Avoid creating a paradoxical subreg wider than
1115 BITS_PER_WORD, since that is illegal. */
1116 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1118 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1119 GET_MODE_BITSIZE (arriving_mode)))
1120 /* Maybe could be handled by using convert_move () ? */
1121 abort ();
1122 reg_to_map = gen_reg_rtx (arriving_mode);
1123 target = gen_lowpart (departing_mode, reg_to_map);
1125 else
1126 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1128 else
1129 reg_to_map = target;
1131 /* Usually, the result value is the machine's return register.
1132 Sometimes it may be a pseudo. Handle both cases. */
1133 if (REG_FUNCTION_VALUE_P (loc))
1134 map->inline_target = reg_to_map;
1135 else
1136 map->reg_map[REGNO (loc)] = reg_to_map;
1138 else if (GET_CODE (loc) == CONCAT)
1140 enum machine_mode departing_mode = TYPE_MODE (type);
1141 enum machine_mode arriving_mode
1142 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1144 if (departing_mode != arriving_mode)
1145 abort ();
1146 if (GET_CODE (XEXP (loc, 0)) != REG
1147 || GET_CODE (XEXP (loc, 1)) != REG)
1148 abort ();
1150 /* Don't use MEMs as direct targets because on some machines
1151 substituting a MEM for a REG makes invalid insns.
1152 Let the combiner substitute the MEM if that is valid. */
1153 if (target == 0 || GET_CODE (target) != REG
1154 || GET_MODE (target) != departing_mode)
1155 target = gen_reg_rtx (departing_mode);
1157 if (GET_CODE (target) != CONCAT)
1158 abort ();
1160 map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
1161 map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
1163 else
1164 abort ();
1166 /* Remap the exception handler data pointer from one to the other. */
1167 temp = get_exception_pointer (inl_f);
1168 if (temp)
1169 map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);
1171 /* Initialize label_map. get_label_from_map will actually make
1172 the labels. */
1173 memset ((char *) &map->label_map[min_labelno], 0,
1174 (max_labelno - min_labelno) * sizeof (rtx));
1176 /* Make copies of the decls of the symbols in the inline function, so that
1177 the copies of the variables get declared in the current function. Set
1178 up things so that lookup_static_chain knows to interpret registers
1179 in SAVE_EXPRs for TYPE_SIZEs as local. */
1180 inline_function_decl = fndecl;
1181 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1182 block = integrate_decl_tree (inl_f->original_decl_initial, map);
1183 BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
1184 inline_function_decl = 0;
1186 /* Make a fresh binding contour that we can easily remove. Do this after
1187 expanding our arguments so cleanups are properly scoped. */
1188 expand_start_bindings_and_block (0, block);
1190 /* Sort the block-map so that it will be easy to find remapped
1191 blocks later. */
1192 qsort (&VARRAY_TREE (map->block_map, 0),
1193 map->block_map->elements_used,
1194 sizeof (tree),
1195 compare_blocks);
1197 /* Perform postincrements before actually calling the function. */
1198 emit_queue ();
1200 /* Clean up stack so that variables might have smaller offsets. */
1201 do_pending_stack_adjust ();
1203 /* Save a copy of the location of const_equiv_varray for
1204 mark_stores, called via note_stores. */
1205 global_const_equiv_varray = map->const_equiv_varray;
1207 /* If the called function does an alloca, save and restore the
1208 stack pointer around the call. This saves stack space, but
1209 also is required if this inline is being done between two
1210 pushes. */
1211 if (inl_f->calls_alloca)
1212 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1214 /* Map pseudos used for initial hard reg values. */
1215 setup_initial_hard_reg_value_integration (inl_f, map);
1217 /* Now copy the insns one by one. */
1218 copy_insn_list (insns, map, static_chain_value);
1220 /* Duplicate the EH regions. This will create an offset from the
1221 region numbers in the function we're inlining to the region
1222 numbers in the calling function. This must wait until after
1223 copy_insn_list, as we need the insn map to be complete. */
1224 eh_region_offset = duplicate_eh_regions (inl_f, map);
1226 /* Now copy the REG_NOTES for those insns. */
1227 copy_insn_notes (insns, map, eh_region_offset);
1229 /* If the insn sequence required one, emit the return label. */
1230 if (map->local_return_label)
1231 emit_label (map->local_return_label);
1233 /* Restore the stack pointer if we saved it above. */
1234 if (inl_f->calls_alloca)
1235 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1237 if (! cfun->x_whole_function_mode_p)
1238 /* In statement-at-a-time mode, we just tell the front-end to add
1239 this block to the list of blocks at this binding level. We
1240 can't do it the way it's done for function-at-a-time mode, since the
1241 superblocks have not been created yet. */
1242 (*lang_hooks.decls.insert_block) (block);
1243 else
1245 BLOCK_CHAIN (block)
1246 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1247 BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
1250 /* End the scope containing the copied formal parameter variables
1251 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1252 here so that expand_end_bindings will not check for unused
1253 variables. That's already been checked for when the inlined
1254 function was defined. */
1255 expand_end_bindings (NULL_TREE, 1, 1);
1257 /* Must mark the line number note after inlined functions as a repeat, so
1258 that the test coverage code can avoid counting the call twice. This
1259 just tells the code to ignore the immediately following line note, since
1260 there already exists a copy of this note before the expanded inline call.
1261 This line number note is still needed for debugging though, so we can't
1262 delete it. */
1263 if (flag_test_coverage)
1264 emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);
1266 emit_line_note (input_filename, lineno);
1268 /* If the function returns a BLKmode object in a register, copy it
1269 out of the temp register into a BLKmode memory object. */
1270 if (target
1271 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1272 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
1273 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
1275 if (structure_value_addr)
1277 target = gen_rtx_MEM (TYPE_MODE (type),
1278 memory_address (TYPE_MODE (type),
1279 structure_value_addr));
1280 set_mem_attributes (target, type, 1);
1283 /* Make sure we free the things we explicitly allocated with xmalloc. */
1284 if (real_label_map)
1285 free (real_label_map);
1286 VARRAY_FREE (map->const_equiv_varray);
1287 free (map->reg_map);
1288 free (map->insn_map);
1289 free (map);
1290 free (arg_vals);
1291 free (arg_trees);
1293 inlining = inlining_previous;
1295 return target;
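/* Usage sketch (illustrative): because of the sentinel values documented
   above, a caller must test for failure before using the result; the
   fall-back path (emitting a normal call) is elided here:

     rtx val = expand_inline_function (fndecl, parms, target, ignore,
                                       type, structure_value_addr);
     if (val == (rtx) (size_t) -1)
       ... emit an ordinary call instead ...  */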
1298 /* Make copies of each insn in the given list using the mapping
1299 computed in expand_inline_function. This function may call itself for
1300 insns containing sequences.
1302 Copying is done in two passes, first the insns and then their REG_NOTES.
1304 If static_chain_value is non-zero, it represents the context-pointer
1305 register for the function. */
1307 static void
1308 copy_insn_list (insns, map, static_chain_value)
1309 rtx insns;
1310 struct inline_remap *map;
1311 rtx static_chain_value;
1313 int i;
1314 rtx insn;
1315 rtx temp;
1316 #ifdef HAVE_cc0
1317 rtx cc0_insn = 0;
1318 #endif
1319 rtx static_chain_mem = 0;
1321 /* Copy the insns one by one. Do this in two passes, first the insns and
1322 then their REG_NOTES. */
1324 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1326 for (insn = insns; insn; insn = NEXT_INSN (insn))
1328 rtx copy, pattern, set;
1330 map->orig_asm_operands_vector = 0;
1332 switch (GET_CODE (insn))
1334 case INSN:
1335 pattern = PATTERN (insn);
1336 set = single_set (insn);
1337 copy = 0;
1338 if (GET_CODE (pattern) == USE
1339 && GET_CODE (XEXP (pattern, 0)) == REG
1340 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1341 /* The (USE (REG n)) at return from the function should
1342 be ignored since we are changing (REG n) into
1343 inline_target. */
1344 break;
1346 /* Ignore setting a function value that we don't want to use. */
1347 if (map->inline_target == 0
1348 && set != 0
1349 && GET_CODE (SET_DEST (set)) == REG
1350 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1352 if (volatile_refs_p (SET_SRC (set)))
1354 rtx new_set;
1356 /* If we must not delete the source,
1357 load it into a new temporary. */
1358 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1360 new_set = single_set (copy);
1361 if (new_set == 0)
1362 abort ();
1364 SET_DEST (new_set)
1365 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1367 /* If the source and destination are the same and it
1368 has a note on it, keep the insn. */
1369 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1370 && REG_NOTES (insn) != 0)
1371 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1372 else
1373 break;
1376 /* Similarly if an ignored return value is clobbered. */
1377 else if (map->inline_target == 0
1378 && GET_CODE (pattern) == CLOBBER
1379 && GET_CODE (XEXP (pattern, 0)) == REG
1380 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1381 break;
1383 /* Look for the address of the static chain slot. The
1384 rtx_equal_p comparisons against the
1385 static_chain_incoming_rtx below may fail if the static
1386 chain is in memory and the address specified is not
1387 "legitimate". This happens on Xtensa where the static
1388 chain is at a negative offset from argp and where only
1389 positive offsets are legitimate. When the RTL is
1390 generated, the address is "legitimized" by copying it
1391 into a register, causing the rtx_equal_p comparisons to
1392 fail. This workaround looks for code that sets a
1393 register to the address of the static chain. Subsequent
1394 memory references via that register can then be
1395 identified as static chain references. We assume that
1396 the register is only assigned once, and that the static
1397 chain address is only live in one register at a time. */
1399 else if (static_chain_value != 0
1400 && set != 0
1401 && GET_CODE (static_chain_incoming_rtx) == MEM
1402 && GET_CODE (SET_DEST (set)) == REG
1403 && rtx_equal_p (SET_SRC (set),
1404 XEXP (static_chain_incoming_rtx, 0)))
1406 static_chain_mem =
1407 gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
1408 SET_DEST (set));
1410 /* emit the instruction in case it is used for something
1411 other than setting the static chain; if it's not used,
1412 it can always be removed as dead code */
1413 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1416 /* If this is setting the static chain rtx, omit it. */
1417 else if (static_chain_value != 0
1418 && set != 0
1419 && (rtx_equal_p (SET_DEST (set),
1420 static_chain_incoming_rtx)
1421 || (static_chain_mem
1422 && rtx_equal_p (SET_DEST (set), static_chain_mem))))
1423 break;
1425 /* If this is setting the static chain pseudo, set it from
1426 the value we want to give it instead. */
1427 else if (static_chain_value != 0
1428 && set != 0
1429 && (rtx_equal_p (SET_SRC (set),
1430 static_chain_incoming_rtx)
1431 || (static_chain_mem
1432 && rtx_equal_p (SET_SRC (set), static_chain_mem))))
1434 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
1436 copy = emit_move_insn (newdest, static_chain_value);
1437 if (GET_CODE (static_chain_incoming_rtx) != MEM)
1438 static_chain_value = 0;
1441 /* If this is setting the virtual stack vars register, this must
1442 be the code at the handler for a builtin longjmp. The value
1443 saved in the setjmp buffer will be the address of the frame
1444 we've made for this inlined instance within our frame. But we
1445 know the offset of that value so we can use it to reconstruct
1446 our virtual stack vars register from that value. If we are
1447 copying it from the stack pointer, leave it unchanged. */
1448 else if (set != 0
1449 && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
1451 HOST_WIDE_INT offset;
1452 temp = map->reg_map[REGNO (SET_DEST (set))];
1453 temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1454 REGNO (temp)).rtx;
1456 if (rtx_equal_p (temp, virtual_stack_vars_rtx))
1457 offset = 0;
1458 else if (GET_CODE (temp) == PLUS
1459 && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
1460 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1461 offset = INTVAL (XEXP (temp, 1));
1462 else
1463 abort ();
1465 if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
1466 temp = SET_SRC (set);
1467 else
1468 temp = force_operand (plus_constant (SET_SRC (set),
1469 - offset),
1470 NULL_RTX);
1472 copy = emit_move_insn (virtual_stack_vars_rtx, temp);
1475 else
1476 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1477 /* REG_NOTES will be copied later. */
1479 #ifdef HAVE_cc0
1480 /* If this insn is setting CC0, it may need to look at
1481 the insn that uses CC0 to see what type of insn it is.
1482 In that case, the call to recog via validate_change will
1483 fail. So don't substitute constants here. Instead,
1484 do it when we emit the following insn.
1486 For example, see the pyr.md file. That machine has signed and
1487 unsigned compares. The compare patterns must check the
1488 following branch insn to see what kind of compare to
1489 emit.
1491 If the previous insn set CC0, substitute constants on it as
1492 well. */
1493 if (sets_cc0_p (PATTERN (copy)) != 0)
1494 cc0_insn = copy;
1495 else
1497 if (cc0_insn)
1498 try_constants (cc0_insn, map);
1499 cc0_insn = 0;
1500 try_constants (copy, map);
1502 #else
1503 try_constants (copy, map);
1504 #endif
1505 INSN_SCOPE (copy) = INSN_SCOPE (insn);
1506 break;
1508 case JUMP_INSN:
1509 if (map->integrating && returnjump_p (insn))
1511 if (map->local_return_label == 0)
1512 map->local_return_label = gen_label_rtx ();
1513 pattern = gen_jump (map->local_return_label);
1515 else
1516 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1518 copy = emit_jump_insn (pattern);
1520 #ifdef HAVE_cc0
1521 if (cc0_insn)
1522 try_constants (cc0_insn, map);
1523 cc0_insn = 0;
1524 #endif
1525 try_constants (copy, map);
1526 INSN_SCOPE (copy) = INSN_SCOPE (insn);
1528 /* If this used to be a conditional jump insn whose branch
1529 direction is now known, we must do something special. */
1530 if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
1532 #ifdef HAVE_cc0
1533 /* If the previous insn set cc0 for us, delete it. */
1534 if (only_sets_cc0_p (PREV_INSN (copy)))
1535 delete_related_insns (PREV_INSN (copy));
1536 #endif
1538 /* If this is now a no-op, delete it. */
1539 if (map->last_pc_value == pc_rtx)
1541 delete_related_insns (copy);
1542 copy = 0;
1544 else
1545 /* Otherwise, this is an unconditional jump so we must put a
1546 BARRIER after it. We could do some dead code elimination
1547 here, but jump.c will do it just as well. */
1548 emit_barrier ();
1550 break;
1552 case CALL_INSN:
1553 /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1554 three attached sequences: normal call, sibling call and tail
1555 recursion. */
1556 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1558 rtx sequence[3];
1559 rtx tail_label;
1561 for (i = 0; i < 3; i++)
1563 rtx seq;
1565 sequence[i] = NULL_RTX;
1566 seq = XEXP (PATTERN (insn), i);
1567 if (seq)
1569 start_sequence ();
1570 copy_insn_list (seq, map, static_chain_value);
1571 sequence[i] = get_insns ();
1572 end_sequence ();
1576 /* Find the new tail recursion label.
1577 It will already be substituted into sequence[2]. */
1578 tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
1579 map, 0);
1581 copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
1582 sequence[0],
1583 sequence[1],
1584 sequence[2],
1585 tail_label));
1586 break;
1589 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1590 copy = emit_call_insn (pattern);
1592 SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
1593 CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
1594 INSN_SCOPE (copy) = INSN_SCOPE (insn);
1596 /* Because the USAGE information potentially contains objects other
1597 than hard registers, we need to copy it. */
1599 CALL_INSN_FUNCTION_USAGE (copy)
1600 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1601 map, 0);
1603 #ifdef HAVE_cc0
1604 if (cc0_insn)
1605 try_constants (cc0_insn, map);
1606 cc0_insn = 0;
1607 #endif
1608 try_constants (copy, map);
1610 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1611 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1612 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
1613 break;
1615 case CODE_LABEL:
1616 copy = emit_label (get_label_from_map (map,
1617 CODE_LABEL_NUMBER (insn)));
1618 LABEL_NAME (copy) = LABEL_NAME (insn);
1619 map->const_age++;
1620 break;
1622 case BARRIER:
1623 copy = emit_barrier ();
1624 break;
1626 case NOTE:
1627 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1629 copy = emit_label (get_label_from_map (map,
1630 CODE_LABEL_NUMBER (insn)));
1631 LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1632 map->const_age++;
1633 break;
1636 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1637 discarded because it is important to have only one of
1638 each in the current function.
1640 NOTE_INSN_DELETED notes aren't useful. */
1642 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1643 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1644 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1646 copy = emit_note (NOTE_SOURCE_FILE (insn),
1647 NOTE_LINE_NUMBER (insn));
1648 if (copy
1649 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1650 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1651 && NOTE_BLOCK (insn))
1653 tree *mapped_block_p;
1655 mapped_block_p
1656 = (tree *) bsearch (NOTE_BLOCK (insn),
1657 &VARRAY_TREE (map->block_map, 0),
1658 map->block_map->elements_used,
1659 sizeof (tree),
1660 find_block);
1662 if (!mapped_block_p)
1663 abort ();
1664 else
1665 NOTE_BLOCK (copy) = *mapped_block_p;
1667 else if (copy
1668 && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1669 NOTE_EXPECTED_VALUE (copy)
1670 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1671 map, 0);
1673 else
1674 copy = 0;
1675 break;
1677 default:
1678 abort ();
1681 if (copy)
1682 RTX_INTEGRATED_P (copy) = 1;
1684 map->insn_map[INSN_UID (insn)] = copy;
1688 /* Copy the REG_NOTES. Increment const_age, so that only constants
1689 from parameters can be substituted in. These are the only ones
1690 that are valid across the entire function. */
1692 static void
1693 copy_insn_notes (insns, map, eh_region_offset)
1694 rtx insns;
1695 struct inline_remap *map;
1696 int eh_region_offset;
1698 rtx insn, new_insn;
1700 map->const_age++;
1701 for (insn = insns; insn; insn = NEXT_INSN (insn))
1703 if (! INSN_P (insn))
1704 continue;
1706 new_insn = map->insn_map[INSN_UID (insn)];
1707 if (! new_insn)
1708 continue;
1710 if (REG_NOTES (insn))
1712 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1714 /* We must also do subst_constants, in case one of our parameters
1715 has const type and constant value. */
1716 subst_constants (&note, NULL_RTX, map, 0);
1717 apply_change_group ();
1718 REG_NOTES (new_insn) = note;
1720 /* Delete any REG_LABEL notes from the chain. Remap any
1721 REG_EH_REGION notes. */
1722 for (; note; note = next)
1724 next = XEXP (note, 1);
1725 if (REG_NOTE_KIND (note) == REG_LABEL)
1726 remove_note (new_insn, note);
1727 else if (REG_NOTE_KIND (note) == REG_EH_REGION
1728 && INTVAL (XEXP (note, 0)) > 0)
1729 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1730 + eh_region_offset);
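/* For example, with an eh_region_offset of 10, a copied
   (REG_EH_REGION 2) note becomes (REG_EH_REGION 12), pointing the
   copy at the corresponding region of the function inlined into.  */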
1734 if (GET_CODE (insn) == CALL_INSN
1735 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1737 int i;
1738 for (i = 0; i < 3; i++)
1739 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1742 if (GET_CODE (insn) == JUMP_INSN
1743 && GET_CODE (PATTERN (insn)) == RESX)
1744 XINT (PATTERN (new_insn), 0) += eh_region_offset;
1748 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1749 push all of those decls and give each one the corresponding home. */
1751 static void
1752 integrate_parm_decls (args, map, arg_vector)
1753 tree args;
1754 struct inline_remap *map;
1755 rtvec arg_vector;
1757 tree tail;
1758 int i;
1760 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1762 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1763 current_function_decl);
1764 rtx new_decl_rtl
1765 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1767 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1768 here, but that's going to require some more work. */
1769 /* DECL_INCOMING_RTL (decl) = ?; */
1770 /* Fully instantiate the address with the equivalent form so that the
1771 debugging information contains the actual register, instead of the
1772 virtual register. Do this by not passing an insn to
1773 subst_constants. */
1774 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1775 apply_change_group ();
1776 SET_DECL_RTL (decl, new_decl_rtl);
1780 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1781 current function a tree of contexts isomorphic to the one that is given.
1783 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1784 registers used in the DECL_RTL field should be remapped. If it is zero,
1785 no mapping is necessary. */
1787 static tree
1788 integrate_decl_tree (let, map)
1789 tree let;
1790 struct inline_remap *map;
1792 tree t;
1793 tree new_block;
1794 tree *next;
1796 new_block = make_node (BLOCK);
1797 VARRAY_PUSH_TREE (map->block_map, new_block);
1798 next = &BLOCK_VARS (new_block);
1800 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1802 tree d;
1804 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1806 if (DECL_RTL_SET_P (t))
1808 rtx r;
1810 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1812 /* Fully instantiate the address with the equivalent form so that the
1813 debugging information contains the actual register, instead of the
1814 virtual register. Do this by not passing an insn to
1815 subst_constants. */
1816 r = DECL_RTL (d);
1817 subst_constants (&r, NULL_RTX, map, 1);
1818 SET_DECL_RTL (d, r);
1820 if (GET_CODE (r) == REG)
1821 REGNO_DECL (REGNO (r)) = d;
1822 else if (GET_CODE (r) == CONCAT)
1824 REGNO_DECL (REGNO (XEXP (r, 0))) = d;
1825 REGNO_DECL (REGNO (XEXP (r, 1))) = d;
1828 apply_change_group ();
1831 /* Add this declaration to the list of variables in the new
1832 block. */
1833 *next = d;
1834 next = &TREE_CHAIN (d);
1837 next = &BLOCK_SUBBLOCKS (new_block);
1838 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1840 *next = integrate_decl_tree (t, map);
1841 BLOCK_SUPERCONTEXT (*next) = new_block;
1842 next = &BLOCK_CHAIN (*next);
1845 TREE_USED (new_block) = TREE_USED (let);
1846 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1848 return new_block;
1851 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1852 except for those few rtx codes that are sharable.
1854 We always return an rtx that is similar to that incoming rtx, with the
1855 exception of possibly changing a REG to a SUBREG or vice versa. No
1856 rtl is ever emitted.
1858 If FOR_LHS is nonzero, it means we are processing something that will
1859 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1860 inlining since we need to be conservative in how it is set for
1861 such cases.
1863 Handle constants that need to be placed in the constant pool by
1864 calling `force_const_mem'. */
1866 rtx
1867 copy_rtx_and_substitute (orig, map, for_lhs)
1868 rtx orig;
1869 struct inline_remap *map;
1870 int for_lhs;
1872 rtx copy, temp;
1873 int i, j;
1874 RTX_CODE code;
1875 enum machine_mode mode;
1876 const char *format_ptr;
1877 int regno;
1879 if (orig == 0)
1880 return 0;
1882 code = GET_CODE (orig);
1883 mode = GET_MODE (orig);
1885 switch (code)
1887 case REG:
1888 /* If the stack pointer register shows up, it must be part of
1889 stack-adjustments (*not* because we eliminated the frame pointer!).
1890 Small hard registers are returned as-is. Pseudo-registers
1891 go through their `reg_map'. */
1892 regno = REGNO (orig);
1893 if (regno <= LAST_VIRTUAL_REGISTER
1894 || (map->integrating
1895 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1897 /* Some hard registers are also mapped,
1898 but others are not translated. */
1899 if (map->reg_map[regno] != 0)
1900 return map->reg_map[regno];
1902 /* If this is the virtual frame pointer, make space in current
1903 function's stack frame for the stack frame of the inline function.
1905 Copy the address of this area into a pseudo. Map
1906 virtual_stack_vars_rtx to this pseudo and set up a constant
1907 equivalence for it to be the address. This will substitute the
1908 address into insns where it can be substituted and use the new
1909 pseudo where it can't. */
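/* For instance, an inlined (plus virtual_stack_vars_rtx (const_int -4))
   first becomes (plus (reg NEW) (const_int -4)); where the recorded
   address is a valid replacement, try_constants then substitutes it
   for the pseudo.  ((reg NEW) is illustrative, not a fixed name.)  */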
1910 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1912 rtx loc, seq;
1913 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1914 #ifdef FRAME_GROWS_DOWNWARD
1915 int alignment
1916 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1917 / BITS_PER_UNIT);
1919 /* In this case, virtual_stack_vars_rtx points to one byte
1920 higher than the top of the frame area. So make sure we
1921 allocate a big enough chunk to keep the frame pointer
1922 aligned like a real one. */
1923 if (alignment)
1924 size = CEIL_ROUND (size, alignment);
1925 #endif
1926 start_sequence ();
1927 loc = assign_stack_temp (BLKmode, size, 1);
1928 loc = XEXP (loc, 0);
1929 #ifdef FRAME_GROWS_DOWNWARD
1930 /* In this case, virtual_stack_vars_rtx points to one byte
1931 higher than the top of the frame area. So compute the offset
1932 to one byte higher than our substitute frame. */
1933 loc = plus_constant (loc, size);
1934 #endif
1935 map->reg_map[regno] = temp
1936 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1938 #ifdef STACK_BOUNDARY
1939 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1940 #endif
1942 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1944 seq = get_insns ();
1945 end_sequence ();
1946 emit_insn_after (seq, map->insns_at_start);
1947 return temp;
1949 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1950 || (map->integrating
1951 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1952 == orig)))
1954 /* Do the same for a block to contain any arguments referenced
1955 in memory. */
1956 rtx loc, seq;
1957 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1959 start_sequence ();
1960 loc = assign_stack_temp (BLKmode, size, 1);
1961 loc = XEXP (loc, 0);
1962 /* When arguments grow downward, the virtual incoming
1963 args pointer points to the top of the argument block,
1964 so the remapped location better do the same. */
1965 #ifdef ARGS_GROW_DOWNWARD
1966 loc = plus_constant (loc, size);
1967 #endif
1968 map->reg_map[regno] = temp
1969 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1971 #ifdef STACK_BOUNDARY
1972 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1973 #endif
1975 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1977 seq = get_insns ();
1978 end_sequence ();
1979 emit_insn_after (seq, map->insns_at_start);
1980 return temp;
1982 else if (REG_FUNCTION_VALUE_P (orig))
1984 /* This is a reference to the function return value. If
1985 the function doesn't have a return value, error. If the
1986 mode doesn't agree, and it isn't BLKmode, make a SUBREG. */
1987 if (map->inline_target == 0)
1989 if (rtx_equal_function_value_matters)
1990 /* This is an ignored return value. We must not
1991 leave it in with REG_FUNCTION_VALUE_P set, since
1992 that would confuse subsequent inlining of the
1993 current function into a later function. */
1994 return gen_rtx_REG (GET_MODE (orig), regno);
1995 else
1996 /* Must be unrolling loops or replicating code if we
1997 reach here, so return the register unchanged. */
1998 return orig;
2000 else if (GET_MODE (map->inline_target) != BLKmode
2001 && mode != GET_MODE (map->inline_target))
2002 return gen_lowpart (mode, map->inline_target);
2003 else
2004 return map->inline_target;
2006 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2007 /* If leaf_renumber_regs_insn() might remap this register to
2008 some other number, make sure we don't share it with the
2009 inlined function, otherwise delayed optimization of the
2010 inlined function may change it in place, breaking our
2011 reference to it. We may still share it within the
2012 function, so create an entry for this register in the
2013 reg_map. */
2014 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2015 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2017 if (!map->leaf_reg_map[regno][mode])
2018 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2019 return map->leaf_reg_map[regno][mode];
2021 #endif
2022 else
2023 return orig;
2025 abort ();
2027 if (map->reg_map[regno] == NULL)
2029 map->reg_map[regno] = gen_reg_rtx (mode);
2030 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2031 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2032 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2033 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2035 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2036 mark_reg_pointer (map->reg_map[regno],
2037 map->regno_pointer_align[regno]);
2039 return map->reg_map[regno];
2041 case SUBREG:
2042 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2043 return simplify_gen_subreg (GET_MODE (orig), copy,
2044 GET_MODE (SUBREG_REG (orig)),
2045 SUBREG_BYTE (orig));
2047 case ADDRESSOF:
2048 copy = gen_rtx_ADDRESSOF (mode,
2049 copy_rtx_and_substitute (XEXP (orig, 0),
2050 map, for_lhs),
2051 0, ADDRESSOF_DECL (orig));
2052 regno = ADDRESSOF_REGNO (orig);
2053 if (map->reg_map[regno])
2054 regno = REGNO (map->reg_map[regno]);
2055 else if (regno > LAST_VIRTUAL_REGISTER)
2057 temp = XEXP (orig, 0);
2058 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2059 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2060 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2061 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2062 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2064 /* Objects may initially be represented as registers, but
2065 are turned into a MEM if their address is taken by
2066 put_var_into_stack. Therefore, the register table may have
2067 entries which are MEMs.
2069 We briefly tried to clear such entries, but that ended up
2070 cascading into many changes due to the optimizers not being
2071 prepared for empty entries in the register table. So we've
2072 decided to allow the MEMs in the register table for now. */
2073 if (REG_P (map->x_regno_reg_rtx[regno])
2074 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2075 mark_reg_pointer (map->reg_map[regno],
2076 map->regno_pointer_align[regno]);
2077 regno = REGNO (map->reg_map[regno]);
2079 ADDRESSOF_REGNO (copy) = regno;
2080 return copy;
2082 case USE:
2083 case CLOBBER:
2084 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2085 to (use foo) if the original insn didn't have a subreg.
2086 Removing the subreg distorts the VAX movstrhi pattern
2087 by changing the mode of an operand. */
2088 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2089 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2090 copy = SUBREG_REG (copy);
2091 return gen_rtx_fmt_e (code, VOIDmode, copy);
2093 /* We need to handle "deleted" labels that appear in the DECL_RTL
2094 of a LABEL_DECL. */
2095 case NOTE:
2096 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2097 break;
2099 /* ... FALLTHRU ... */
2100 case CODE_LABEL:
2101 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2102 = LABEL_PRESERVE_P (orig);
2103 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2105 case LABEL_REF:
2106 copy
2107 = gen_rtx_LABEL_REF
2108 (mode,
2109 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2110 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2112 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2114 /* The fact that this label was previously nonlocal does not mean
2115 it still is, so we must check if it is within the range of
2116 this function's labels. */
2117 LABEL_REF_NONLOCAL_P (copy)
2118 = (LABEL_REF_NONLOCAL_P (orig)
2119 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2120 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2122 /* If we have made a nonlocal label local, it means that this
2123 inlined call will be referring to our nonlocal goto handler.
2124 So make sure we create one for this block; we normally would
2125 not since this is not otherwise considered a "call". */
2126 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2127 function_call_count++;
2129 return copy;
2131 case PC:
2132 case CC0:
2133 case CONST_INT:
2134 case CONST_VECTOR:
2135 return orig;
2137 case SYMBOL_REF:
2138 /* Symbols which represent the address of a label stored in the constant
2139 pool must be modified to point to a constant pool entry for the
2140 remapped label. Otherwise, symbols are returned unchanged. */
2141 if (CONSTANT_POOL_ADDRESS_P (orig))
2143 struct function *f = inlining ? inlining : cfun;
2144 rtx constant = get_pool_constant_for_function (f, orig);
2145 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2146 if (inlining)
2148 rtx temp = force_const_mem (const_mode,
2149 copy_rtx_and_substitute (constant,
2150 map, 0));
2152 #if 0
2153 /* Legitimizing the address here is incorrect.
2155 Since we had a SYMBOL_REF before, we can assume it is valid
2156 to have one in this position in the insn.
2158 Also, change_address may create new registers. These
2159 registers will not have valid reg_map entries. This can
2160 cause try_constants() to fail because it assumes that all
2161 registers in the rtx have valid reg_map entries, and it may
2162 end up replacing one of these new registers with junk. */
2164 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2165 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2166 #endif
2168 temp = XEXP (temp, 0);
2170 #ifdef POINTERS_EXTEND_UNSIGNED
2171 if (GET_MODE (temp) != GET_MODE (orig))
2172 temp = convert_memory_address (GET_MODE (orig), temp);
2173 #endif
2174 return temp;
2176 else if (GET_CODE (constant) == LABEL_REF)
2177 return XEXP (force_const_mem
2178 (GET_MODE (orig),
2179 copy_rtx_and_substitute (constant, map, for_lhs)),
2180 0);
2183 return orig;
2185 case CONST_DOUBLE:
2186 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2187 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2188 duplicate of a CONST_DOUBLE we have already seen. */
2189 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2191 REAL_VALUE_TYPE d;
2193 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2194 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2196 else
2197 return immed_double_const (CONST_DOUBLE_LOW (orig),
2198 CONST_DOUBLE_HIGH (orig), VOIDmode);
2200 case CONST:
2201 /* Make new constant pool entry for a constant
2202 that was in the pool of the inline function. */
2203 if (RTX_INTEGRATED_P (orig))
2204 abort ();
2205 break;
2207 case ASM_OPERANDS:
2208 /* If a single asm insn contains multiple output operands then
2209 it contains multiple ASM_OPERANDS rtx's that share the input
2210 and constraint vecs. We must make sure that the copied insn
2211 continues to share them. */
2212 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2214 copy = rtx_alloc (ASM_OPERANDS);
2215 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2216 PUT_MODE (copy, GET_MODE (orig));
2217 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2218 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2219 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2220 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2221 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2222 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2223 = map->copy_asm_constraints_vector;
2224 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2225 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2226 return copy;
2228 break;
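/* The first ASM_OPERANDS actually copied records its input and
   constraint vectors in MAP (see the end of this function); the
   test above then lets its siblings share those copies.  */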
2230 case CALL:
2231 /* This is given special treatment because the first
2232 operand of a CALL is a (MEM ...) which may get
2233 forced into a register for cse. This is undesirable
2234 if function-address cse isn't wanted or if we won't do cse. */
2235 #ifndef NO_FUNCTION_CSE
2236 if (! (optimize && ! flag_no_function_cse))
2237 #endif
2239 rtx copy
2240 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2241 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2242 map, 0));
2244 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2246 return
2247 gen_rtx_CALL (GET_MODE (orig), copy,
2248 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2250 break;
2252 #if 0
2253 /* Must be ifdefed out for loop unrolling to work. */
2254 case RETURN:
2255 abort ();
2256 #endif
2258 case SET:
2259 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2260 Adjust the setting by the offset of the area we made.
2261 If the nonlocal goto is into the current function,
2262 this will result in unnecessarily bad code, but should work. */
2263 if (SET_DEST (orig) == virtual_stack_vars_rtx
2264 || SET_DEST (orig) == virtual_incoming_args_rtx)
2266 /* In case a translation hasn't occurred already, make one now. */
2267 rtx equiv_reg;
2268 rtx equiv_loc;
2269 HOST_WIDE_INT loc_offset;
2271 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2272 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2273 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2274 REGNO (equiv_reg)).rtx;
2275 loc_offset
2276 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2278 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2279 force_operand
2280 (plus_constant
2281 (copy_rtx_and_substitute (SET_SRC (orig),
2282 map, 0),
2283 - loc_offset),
2284 NULL_RTX));
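/* E.g. a nonlocal goto receiver restoring fp: the restored value
   was computed relative to the inline function's frame, so it is
   adjusted by the offset of the block that now stands in for that
   frame.  */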
2286 else
2287 return gen_rtx_SET (VOIDmode,
2288 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2289 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2290 break;
2292 case MEM:
2293 if (inlining
2294 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2295 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2297 enum machine_mode const_mode
2298 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2299 rtx constant
2300 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2302 constant = copy_rtx_and_substitute (constant, map, 0);
2304 /* If this was an address of a constant pool entry that itself
2305 had to be placed in the constant pool, it might not be a
2306 valid address. So the recursive call might have turned it
2307 into a register. In that case, it isn't a constant any
2308 more, so return it. This has the potential of changing a
2309 MEM into a REG, but we'll assume that it is safe. */
2310 if (! CONSTANT_P (constant))
2311 return constant;
2313 return validize_mem (force_const_mem (const_mode, constant));
2316 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2317 map, 0));
2318 MEM_COPY_ATTRIBUTES (copy, orig);
2320 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2321 since this may be an indirect reference to a parameter and the
2322 actual may not be readonly. */
2323 if (inlining && !for_lhs)
2324 RTX_UNCHANGING_P (copy) = 0;
2326 return copy;
2328 default:
2329 break;
2332 copy = rtx_alloc (code);
2333 PUT_MODE (copy, mode);
2334 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2335 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2336 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2338 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2340 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2342 switch (*format_ptr++)
2344 case '0':
2345 /* Copy this through the wide int field; that's safest. */
2346 X0WINT (copy, i) = X0WINT (orig, i);
2347 break;
2349 case 'e':
2350 XEXP (copy, i)
2351 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2352 break;
2354 case 'u':
2355 /* Change any references to old-insns to point to the
2356 corresponding copied insns. */
2357 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2358 break;
2360 case 'E':
2361 XVEC (copy, i) = XVEC (orig, i);
2362 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2364 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2365 for (j = 0; j < XVECLEN (copy, i); j++)
2366 XVECEXP (copy, i, j)
2367 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2368 map, for_lhs);
2370 break;
2372 case 'w':
2373 XWINT (copy, i) = XWINT (orig, i);
2374 break;
2376 case 'i':
2377 XINT (copy, i) = XINT (orig, i);
2378 break;
2380 case 's':
2381 XSTR (copy, i) = XSTR (orig, i);
2382 break;
2384 case 't':
2385 XTREE (copy, i) = XTREE (orig, i);
2386 break;
2388 default:
2389 abort ();
2393 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2395 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2396 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2397 map->copy_asm_constraints_vector
2398 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2401 return copy;
2404 /* Substitute known constant values into INSN, if that is valid. */
2406 void
2407 try_constants (insn, map)
2408 rtx insn;
2409 struct inline_remap *map;
2411 int i;
2413 map->num_sets = 0;
2415 /* First try just updating addresses, then other things. This is
2416 important when we have something like the store of a constant
2417 into memory and we can update the memory address but the machine
2418 does not support a constant source. */
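/* E.g. storing a constant into a slot addressed through
   virtual_stack_vars_rtx: the address-only pass can remap the
   address even when the machine rejects a constant source.  */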
2419 subst_constants (&PATTERN (insn), insn, map, 1);
2420 apply_change_group ();
2421 subst_constants (&PATTERN (insn), insn, map, 0);
2422 apply_change_group ();
2424 /* Show we don't know the value of anything stored or clobbered. */
2425 note_stores (PATTERN (insn), mark_stores, NULL);
2426 map->last_pc_value = 0;
2427 #ifdef HAVE_cc0
2428 map->last_cc0_value = 0;
2429 #endif
2431 /* Set up any constant equivalences made in this insn. */
2432 for (i = 0; i < map->num_sets; i++)
2434 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2436 int regno = REGNO (map->equiv_sets[i].dest);
2438 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2439 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2440 /* The following clause is a hack to make the case work where GNU C++
2441 reassigns a variable to make cse work right. */
2442 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2443 regno).rtx,
2444 map->equiv_sets[i].equiv))
2445 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2446 map->equiv_sets[i].equiv, map->const_age);
2448 else if (map->equiv_sets[i].dest == pc_rtx)
2449 map->last_pc_value = map->equiv_sets[i].equiv;
2450 #ifdef HAVE_cc0
2451 else if (map->equiv_sets[i].dest == cc0_rtx)
2452 map->last_cc0_value = map->equiv_sets[i].equiv;
2453 #endif
2457 /* Substitute known constants for pseudo regs in the contents of LOC,
2458 which are part of INSN.
2459 If INSN is zero, the substitution should always be done (this is used to
2460 update DECL_RTL).
2461 These changes are taken out by try_constants if the result is not valid.
2463 Note that we are more concerned with determining when the result of a SET
2464 is a constant, for further propagation, than actually inserting constants
2465 into insns; cse will do the latter task better.
2467 This function is also used to adjust addresses of items previously addressed
2468 via the virtual stack variable or virtual incoming arguments registers.
2470 If MEMONLY is nonzero, only make changes inside a MEM. */
2472 static void
2473 subst_constants (loc, insn, map, memonly)
2474 rtx *loc;
2475 rtx insn;
2476 struct inline_remap *map;
2477 int memonly;
2479 rtx x = *loc;
2480 int i, j;
2481 enum rtx_code code;
2482 const char *format_ptr;
2483 int num_changes = num_validated_changes ();
2484 rtx new = 0;
2485 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2487 code = GET_CODE (x);
2489 switch (code)
2491 case PC:
2492 case CONST_INT:
2493 case CONST_DOUBLE:
2494 case CONST_VECTOR:
2495 case SYMBOL_REF:
2496 case CONST:
2497 case LABEL_REF:
2498 case ADDRESS:
2499 return;
2501 #ifdef HAVE_cc0
2502 case CC0:
2503 if (! memonly)
2504 validate_change (insn, loc, map->last_cc0_value, 1);
2505 return;
2506 #endif
2508 case USE:
2509 case CLOBBER:
2510 /* The only thing we can do with a USE or CLOBBER is possibly do
2511 some substitutions in a MEM within it. */
2512 if (GET_CODE (XEXP (x, 0)) == MEM)
2513 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2514 return;
2516 case REG:
2517 /* Substitute for parms and known constants. Don't replace
2518 hard regs used as user variables with constants. */
2519 if (! memonly)
2521 int regno = REGNO (x);
2522 struct const_equiv_data *p;
2524 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2525 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2526 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2527 p->rtx != 0)
2528 && p->age >= map->const_age)
2529 validate_change (insn, loc, p->rtx, 1);
2531 return;
2533 case SUBREG:
2534 /* SUBREG applied to something other than a reg
2535 should be treated as ordinary, since that must
2536 be a special hack and we don't know how to treat it specially.
2537 Consider for example mulsidi3 in m68k.md.
2538 Ordinary SUBREG of a REG needs this special treatment. */
2539 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2541 rtx inner = SUBREG_REG (x);
2542 rtx new = 0;
2544 /* We can't call subst_constants on &SUBREG_REG (x) because any
2545 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2546 see what is inside, try to form the new SUBREG and see if that is
2547 valid. We handle two cases: extracting a full word in an
2548 integral mode and extracting the low part. */
2549 subst_constants (&inner, NULL_RTX, map, 0);
2550 new = simplify_gen_subreg (GET_MODE (x), inner,
2551 GET_MODE (SUBREG_REG (x)),
2552 SUBREG_BYTE (x));
2554 if (new)
2555 validate_change (insn, loc, new, 1);
2556 else
2557 cancel_changes (num_changes);
2559 return;
2561 break;
2563 case MEM:
2564 subst_constants (&XEXP (x, 0), insn, map, 0);
2566 /* If a memory address got spoiled, change it back. */
2567 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2568 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2569 cancel_changes (num_changes);
2570 return;
2572 case SET:
2574 /* Substitute constants in our source, and in any arguments to a
2575 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2576 itself. */
2577 rtx *dest_loc = &SET_DEST (x);
2578 rtx dest = *dest_loc;
2579 rtx src, tem;
2580 enum machine_mode compare_mode = VOIDmode;
2582 /* If SET_SRC is a COMPARE which subst_constants would turn into
2583 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2584 is to be done. */
2585 if (GET_CODE (SET_SRC (x)) == COMPARE)
2587 src = SET_SRC (x);
2588 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2589 #ifdef HAVE_cc0
2590 || dest == cc0_rtx
2591 #endif
2594 compare_mode = GET_MODE (XEXP (src, 0));
2595 if (compare_mode == VOIDmode)
2596 compare_mode = GET_MODE (XEXP (src, 1));
2600 subst_constants (&SET_SRC (x), insn, map, memonly);
2601 src = SET_SRC (x);
2603 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2604 || GET_CODE (*dest_loc) == SUBREG
2605 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2607 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2609 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2610 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2612 dest_loc = &XEXP (*dest_loc, 0);
2615 /* Do substitutions in the address of a destination in memory. */
2616 if (GET_CODE (*dest_loc) == MEM)
2617 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2619 /* Check for the case where DEST is a SUBREG, both it and the underlying
2620 register are less than one word, and the SUBREG has the wider mode.
2621 In that case, we are really setting the underlying register to the
2622 source converted to the mode of DEST. So indicate that. */
2623 if (GET_CODE (dest) == SUBREG
2624 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2625 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2626 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2627 <= GET_MODE_SIZE (GET_MODE (dest)))
2628 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2629 src)))
2630 src = tem, dest = SUBREG_REG (dest);
2632 /* If storing a recognizable value, save it for later recording. */
2633 if ((map->num_sets < MAX_RECOG_OPERANDS)
2634 && (CONSTANT_P (src)
2635 || (GET_CODE (src) == REG
2636 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2637 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2638 || (GET_CODE (src) == PLUS
2639 && GET_CODE (XEXP (src, 0)) == REG
2640 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2641 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2642 && CONSTANT_P (XEXP (src, 1)))
2643 || GET_CODE (src) == COMPARE
2644 #ifdef HAVE_cc0
2645 || dest == cc0_rtx
2646 #endif
2647 || (dest == pc_rtx
2648 && (src == pc_rtx || GET_CODE (src) == RETURN
2649 || GET_CODE (src) == LABEL_REF))))
2651 /* Normally, this copy won't do anything. But, if SRC is a COMPARE,
2652 it will cause us to save the COMPARE with any constants
2653 substituted, which is what we want for later. */
2654 rtx src_copy = copy_rtx (src);
2655 map->equiv_sets[map->num_sets].equiv = src_copy;
2656 map->equiv_sets[map->num_sets++].dest = dest;
2657 if (compare_mode != VOIDmode
2658 && GET_CODE (src) == COMPARE
2659 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2660 #ifdef HAVE_cc0
2661 || dest == cc0_rtx
2662 #endif
2664 && GET_MODE (XEXP (src, 0)) == VOIDmode
2665 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2667 map->compare_src = src_copy;
2668 map->compare_mode = compare_mode;
2672 return;
2674 default:
2675 break;
2678 format_ptr = GET_RTX_FORMAT (code);
2680 /* If the first operand is an expression, save its mode for later. */
2681 if (*format_ptr == 'e')
2682 op0_mode = GET_MODE (XEXP (x, 0));
2684 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2686 switch (*format_ptr++)
2688 case '0':
2689 break;
2691 case 'e':
2692 if (XEXP (x, i))
2693 subst_constants (&XEXP (x, i), insn, map, memonly);
2694 break;
2696 case 'u':
2697 case 'i':
2698 case 's':
2699 case 'w':
2700 case 'n':
2701 case 't':
2702 case 'B':
2703 break;
2705 case 'E':
2706 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2707 for (j = 0; j < XVECLEN (x, i); j++)
2708 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2710 break;
2712 default:
2713 abort ();
2717 /* If this is a commutative operation, move a constant to the second
2718 operand unless the second operand is already a CONST_INT. */
2719 if (! memonly
2720 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2721 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2723 rtx tem = XEXP (x, 0);
2724 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2725 validate_change (insn, &XEXP (x, 1), tem, 1);
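/* So e.g. (plus (const_int 4) (reg 65)) is queued to become
   (plus (reg 65) (const_int 4)), the canonical operand order.  */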
2728 /* Simplify the expression in case we put in some constants. */
2729 if (! memonly)
2730 switch (GET_RTX_CLASS (code))
2732 case '1':
2733 if (op0_mode == MAX_MACHINE_MODE)
2734 abort ();
2735 new = simplify_unary_operation (code, GET_MODE (x),
2736 XEXP (x, 0), op0_mode);
2737 break;
2739 case '<':
2741 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2743 if (op_mode == VOIDmode)
2744 op_mode = GET_MODE (XEXP (x, 1));
2745 new = simplify_relational_operation (code, op_mode,
2746 XEXP (x, 0), XEXP (x, 1));
2747 #ifdef FLOAT_STORE_FLAG_VALUE
2748 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2750 enum machine_mode mode = GET_MODE (x);
2751 if (new == const0_rtx)
2752 new = CONST0_RTX (mode);
2753 else
2755 REAL_VALUE_TYPE val;
2757 /* Avoid automatic aggregate initialization. */
2758 val = FLOAT_STORE_FLAG_VALUE (mode);
2759 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2762 #endif
2763 break;
2766 case '2':
2767 case 'c':
2768 new = simplify_binary_operation (code, GET_MODE (x),
2769 XEXP (x, 0), XEXP (x, 1));
2770 break;
2772 case 'b':
2773 case '3':
2774 if (op0_mode == MAX_MACHINE_MODE)
2775 abort ();
2777 if (code == IF_THEN_ELSE)
2779 rtx op0 = XEXP (x, 0);
2781 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2782 && GET_MODE (op0) == VOIDmode
2783 && ! side_effects_p (op0)
2784 && XEXP (op0, 0) == map->compare_src
2785 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2787 /* We have a compare of two VOIDmode constants for which
2788 we recorded the comparison mode. */
2789 rtx temp =
2790 simplify_relational_operation (GET_CODE (op0),
2791 map->compare_mode,
2792 XEXP (op0, 0),
2793 XEXP (op0, 1));
2795 if (temp == const0_rtx)
2796 new = XEXP (x, 2);
2797 else if (temp == const1_rtx)
2798 new = XEXP (x, 1);
2801 if (!new)
2802 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2803 XEXP (x, 0), XEXP (x, 1),
2804 XEXP (x, 2));
2805 break;
2808 if (new)
2809 validate_change (insn, loc, new, 1);
2812 /* Show that the registers modified no longer contain known constants. We are
2813 called from note_stores with parts of the new insn. */
2815 static void
2816 mark_stores (dest, x, data)
2817 rtx dest;
2818 rtx x ATTRIBUTE_UNUSED;
2819 void *data ATTRIBUTE_UNUSED;
2821 int regno = -1;
2822 enum machine_mode mode = VOIDmode;
2824 /* DEST is always the innermost thing set, except in the case of
2825 SUBREGs of hard registers. */
2827 if (GET_CODE (dest) == REG)
2828 regno = REGNO (dest), mode = GET_MODE (dest);
2829 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2831 regno = REGNO (SUBREG_REG (dest));
2832 if (regno < FIRST_PSEUDO_REGISTER)
2833 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2834 GET_MODE (SUBREG_REG (dest)),
2835 SUBREG_BYTE (dest),
2836 GET_MODE (dest));
2837 mode = GET_MODE (SUBREG_REG (dest));
2840 if (regno >= 0)
2842 unsigned int uregno = regno;
2843 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2844 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
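/* A store to a hard register may clobber several consecutive
   registers, so every register in [uregno, last_reg] is
   invalidated below; a pseudo occupies just its own entry.  */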
2845 unsigned int i;
2847 /* Ignore the virtual stack vars and virtual args registers since those
2848 are handled separately. */
2849 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2850 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2851 for (i = uregno; i <= last_reg; i++)
2852 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2853 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2857 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2858 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2859 that it points to the node itself, thus indicating that the node is its
2860 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2861 the given node is NULL, recursively descend the decl/block tree which
2862 it is the root of, and for each other ..._DECL or BLOCK node contained
2863 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2864 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2865 values to point to themselves. */
2867 static void
2868 set_block_origin_self (stmt)
2869 tree stmt;
2871 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2873 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2876 tree local_decl;
2878 for (local_decl = BLOCK_VARS (stmt);
2879 local_decl != NULL_TREE;
2880 local_decl = TREE_CHAIN (local_decl))
2881 set_decl_origin_self (local_decl); /* Potential recursion. */
2885 tree subblock;
2887 for (subblock = BLOCK_SUBBLOCKS (stmt);
2888 subblock != NULL_TREE;
2889 subblock = BLOCK_CHAIN (subblock))
2890 set_block_origin_self (subblock); /* Recurse. */
2895 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2896 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2897 node so that it points to the node itself, thus indicating that the
2898 node represents its own (abstract) origin. Additionally, if the
2899 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2900 the decl/block tree of which the given node is the root, and for
2901 each other ..._DECL or BLOCK node contained therein whose
2902 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2903 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2904 point to themselves. */
2906 void
2907 set_decl_origin_self (decl)
2908 tree decl;
2910 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2912 DECL_ABSTRACT_ORIGIN (decl) = decl;
2913 if (TREE_CODE (decl) == FUNCTION_DECL)
2915 tree arg;
2917 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2918 DECL_ABSTRACT_ORIGIN (arg) = arg;
2919 if (DECL_INITIAL (decl) != NULL_TREE
2920 && DECL_INITIAL (decl) != error_mark_node)
2921 set_block_origin_self (DECL_INITIAL (decl));
2926 /* Given a pointer to some BLOCK node, and a boolean value to set the
2927 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2928 the given block, and for all local decls and all local sub-blocks
2929 (recursively) which are contained therein. */
2931 static void
2932 set_block_abstract_flags (stmt, setting)
2933 tree stmt;
2934 int setting;
2936 tree local_decl;
2937 tree subblock;
2939 BLOCK_ABSTRACT (stmt) = setting;
2941 for (local_decl = BLOCK_VARS (stmt);
2942 local_decl != NULL_TREE;
2943 local_decl = TREE_CHAIN (local_decl))
2944 set_decl_abstract_flags (local_decl, setting);
2946 for (subblock = BLOCK_SUBBLOCKS (stmt);
2947 subblock != NULL_TREE;
2948 subblock = BLOCK_CHAIN (subblock))
2949 set_block_abstract_flags (subblock, setting);
2952 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2953 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2954 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2955 set the abstract flags for all of the parameters, local vars, local
2956 blocks and sub-blocks (recursively) to the same setting. */
2958 void
2959 set_decl_abstract_flags (decl, setting)
2960 tree decl;
2961 int setting;
2963 DECL_ABSTRACT (decl) = setting;
2964 if (TREE_CODE (decl) == FUNCTION_DECL)
2966 tree arg;
2968 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2969 DECL_ABSTRACT (arg) = setting;
2970 if (DECL_INITIAL (decl) != NULL_TREE
2971 && DECL_INITIAL (decl) != error_mark_node)
2972 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2976 /* Output the assembly language code for the function FNDECL
2977 from its DECL_SAVED_INSNS. Used for inline functions that are output
2978 at the end of compilation instead of where they appeared in the source. */
2980 void
2981 output_inline_function (fndecl)
2982 tree fndecl;
2984 struct function *old_cfun = cfun;
2985 enum debug_info_type old_write_symbols = write_symbols;
2986 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
2987 struct function *f = DECL_SAVED_INSNS (fndecl);
2989 cfun = f;
2990 current_function_decl = fndecl;
2992 set_new_last_label_num (f->inl_max_label_num);
2994 /* We're not deferring this any longer. */
2995 DECL_DEFER_OUTPUT (fndecl) = 0;
2997 /* If requested, suppress debugging information. */
2998 if (f->no_debugging_symbols)
3000 write_symbols = NO_DEBUG;
3001 debug_hooks = &do_nothing_debug_hooks;
3004 /* Compile this function all the way down to assembly code. As a
3005 side effect this destroys the saved RTL representation, but
3006 that's okay, because we don't need to inline this anymore. */
3007 rest_of_compilation (fndecl);
3008 DECL_INLINE (fndecl) = 0;
3010 cfun = old_cfun;
3011 current_function_decl = old_cfun ? old_cfun->decl : 0;
3012 write_symbols = old_write_symbols;
3013 debug_hooks = old_debug_hooks;
3017 /* Functions to keep track of the values hard regs had at the start of
3018 the function. */
3020 rtx
3021 get_hard_reg_initial_reg (fun, reg)
3022 struct function *fun;
3023 rtx reg;
3025 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3026 int i;
3028 if (ivs == 0)
3029 return NULL_RTX;
3031 for (i = 0; i < ivs->num_entries; i++)
3032 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3033 return ivs->entries[i].hard_reg;
3035 return NULL_RTX;
3038 rtx
3039 has_func_hard_reg_initial_val (fun, reg)
3040 struct function *fun;
3041 rtx reg;
3043 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3044 int i;
3046 if (ivs == 0)
3047 return NULL_RTX;
3049 for (i = 0; i < ivs->num_entries; i++)
3050 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3051 return ivs->entries[i].pseudo;
3053 return NULL_RTX;
3056 rtx
3057 get_func_hard_reg_initial_val (fun, reg)
3058 struct function *fun;
3059 rtx reg;
3061 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3062 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3064 if (rv)
3065 return rv;
3067 if (ivs == 0)
3069 fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
3070 ivs = fun->hard_reg_initial_vals;
3071 ivs->num_entries = 0;
3072 ivs->max_entries = 5;
3073 ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
3076 if (ivs->num_entries >= ivs->max_entries)
3078 ivs->max_entries += 5;
3079 ivs->entries =
3080 (initial_value_pair *) ggc_realloc (ivs->entries,
3081 ivs->max_entries
3082 * sizeof (initial_value_pair));
3085 ivs->entries[ivs->num_entries].hard_reg = reg;
3086 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3088 return ivs->entries[ivs->num_entries++].pseudo;
3091 rtx
3092 get_hard_reg_initial_val (mode, regno)
3093 enum machine_mode mode;
3094 int regno;
3096 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3099 rtx
3100 has_hard_reg_initial_val (mode, regno)
3101 enum machine_mode mode;
3102 int regno;
3104 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
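/* A minimal usage sketch (REGNO here is a hypothetical hard register
   number a backend cares about, e.g. a link register):

       rtx entry_val = get_hard_reg_initial_val (Pmode, REGNO);

   The returned pseudo can be used anywhere in the function;
   emit_initial_value_sets later emits the copy from the hard
   register at the start of the function.  */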
3107 static void
3108 setup_initial_hard_reg_value_integration (inl_f, remap)
3109 struct function *inl_f;
3110 struct inline_remap *remap;
3112 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3113 int i;
3115 if (ivs == 0)
3116 return;
3118 for (i = 0; i < ivs->num_entries; i ++)
3119 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3120 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3124 void
3125 emit_initial_value_sets ()
3127 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3128 int i;
3129 rtx seq;
3131 if (ivs == 0)
3132 return;
3134 start_sequence ();
3135 for (i = 0; i < ivs->num_entries; i++)
3136 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3137 seq = get_insns ();
3138 end_sequence ();
3140 emit_insn_after (seq, get_insns ());
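/* The copies are placed immediately after the function's first
   insn, so every later use of the pseudos sees the entry values.  */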
3143 /* If the backend knows where to allocate pseudos for hard
3144 register initial values, register these allocations now. */
3145 void
3146 allocate_initial_values (reg_equiv_memory_loc)
3147 rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3149 #ifdef ALLOCATE_INITIAL_VALUE
3150 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3151 int i;
3153 if (ivs == 0)
3154 return;
3156 for (i = 0; i < ivs->num_entries; i++)
3158 int regno = REGNO (ivs->entries[i].pseudo);
3159 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3161 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3162 ; /* Do nothing. */
3163 else if (GET_CODE (x) == MEM)
3164 reg_equiv_memory_loc[regno] = x;
3165 else if (GET_CODE (x) == REG)
3167 reg_renumber[regno] = REGNO (x);
3168 /* Poke the regno right into regno_reg_rtx
3169 so that even fixed regs are accepted. */
3170 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3172 else abort ();
3174 #endif
3177 #include "gt-integrate.h"