/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
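/* For example, CEIL_ROUND (13, 8) evaluates to (13 + 8 - 1) & ~(8 - 1)
   = 20 & ~7 = 16, i.e. 13 rounded up to the next multiple of 8.  Note
   that ALIGN must be a power of two for the mask to be correct.  */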

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
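/* For instance, a function taking two arguments is given a threshold of
   1 + (3 * 2) / 2 = 4 insns when optimizing for size, and
   8 * (8 + 2) = 80 insns otherwise.  */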

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void setup_initial_hard_reg_value_integration (struct function *,
                                                      struct inline_remap *);

static rtvec initialize_for_inline (tree);
static void note_modified_parmregs (rtx, rtx, void *);
static void integrate_parm_decls (tree, struct inline_remap *, rtvec);
static tree integrate_decl_tree (tree, struct inline_remap *);
static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void process_reg_param (struct inline_remap *, rtx, rtx);
static void mark_stores (rtx, rtx, void *);
static void save_parm_insns (rtx, rtx);
static void copy_insn_list (rtx, struct inline_remap *, rtx);
static void copy_insn_notes (rtx, struct inline_remap *, int);
static int compare_blocks (const void *, const void *);
static int find_block (const void *, const void *);

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable points to the struct function of the function being
   inlined.  */
static struct function *inlining = 0;

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
          tree name = TREE_PURPOSE (a);
          int i;

          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
            if (is_attribute_p (targetm.attribute_table[i].name, name))
              return (*targetm.function_attribute_inlinable_p) (fndecl);
        }
    }

  return true;
}

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (tree fndecl)
{
  rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline, increase the maximum size to
     MAX_INLINE_INSNS_RTL (--param max-inline-insn-rtl=<n>).  For
     regular functions use the limit given by INTEGRATE_THRESHOLD.
     Note that the RTL inliner is not used by the languages that use
     the tree inliner (C, C++).  */

  int max_insns = (DECL_INLINE (fndecl))
                   ? (MAX_INLINE_INSNS_RTL
                      + 8 * list_length (DECL_ARGUMENTS (fndecl)))
                   : INTEGRATE_THRESHOLD (fndecl);

  int ninsns = 0;
  tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if (last && TREE_VALUE (last) != void_type_node)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
               && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          ninsns++;

      if (ninsns >= max_insns)
        return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behavior.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
        return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (!function_attribute_inlinable_p (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (tree fndecl)
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset (parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
         otherwise the copy_rtx call below will not unshare the MEM, because
         it shares the ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
          && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
        p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Copy DECL.  The DECL originally was in the FROM_FN, but now it will
   be in the TO_FN.  A PARM_DECL or RESULT_DECL is copied as an
   equivalent VAR_DECL, not a new PARM_DECL.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
          && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
          && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
          && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
        {
          invisiref = 1;
          type = DECL_ARG_TYPE (decl);
        }
      else
        type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
         new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
        {
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
          TREE_READONLY (copy) = TREE_READONLY (decl);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
        }
      else
        {
          TREE_ADDRESSABLE (copy) = 0;
          TREE_READONLY (copy) = 1;
          TREE_THIS_VOLATILE (copy) = 0;
        }
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      (*lang_hooks.dup_lang_specific_decl) (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
         address has been taken; it's for internal bookkeeping in
         expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
        TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (tree fndecl)
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */
  if (! flag_no_inline)
    parmdecl_map = xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  if (! flag_no_inline)
    argvec = initialize_for_inline (fndecl);
  else
    argvec = NULL;

  /* Delete basic block notes created by an early run of find_basic_blocks.
     The notes would later be used by find_basic_blocks to reuse the memory
     for basic_block structures on an already freed obstack.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
      delete_related_insns (insn);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  if (! flag_no_inline)
    {
      /* Get the insn which signals the end of parameter setup code.  */
      first_nonparm_insn = get_first_nonparm_insn ();

      /* Now just scan the chain of insns to see what happens to our
         PARM_DECLs.  If a PARM_DECL is used but never modified, we
         can substitute its rtl directly when expanding inline (and
         perform constant folding when its incoming value is
         constant).  Otherwise, we have to copy its value into a new
         register and track the new register's life.  */
      in_nonparm_insns = 0;
      save_parm_insns (insn, first_nonparm_insn);

      cfun->inl_max_label_num = max_label_num ();
      cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
      cfun->original_arg_vector = argvec;
    }
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  cfun->saved_for_inline = 1;

  /* Clean up.  */
  if (! flag_no_inline)
    free (parmdecl_map);
}

/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (rtx insn, rtx first_nonparm_insn)
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (INSN_P (insn))
        {
          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs, NULL);

          /* If this is a CALL_PLACEHOLDER insn then we need to look into the
             three attached sequences: normal call, sibling call and tail
             recursion.  */
          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              int i;

              for (i = 0; i < 3; i++)
                save_parm_insns (XEXP (PATTERN (insn), i),
                                 first_nonparm_insn);
            }
        }
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (rtx reg, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
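/* For example, FIXED_BASE_PLUS_P matches an address such as
   (plus (reg virtual-stack-vars) (const_int 8)), i.e. a constant
   offset from one of the virtual registers, but not a sum involving
   a pseudo or hard register.  */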

/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (struct inline_remap *map, rtx loc, rtx copy)
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
          && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
          && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
        SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}

/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  We cannot just subtract the
   two pointers, because the difference may not fit in an int.  */

static int
compare_blocks (const void *v1, const void *v2)
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (const void *v1, const void *v2)
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) b1;
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}
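
/* compare_blocks and find_block are used as a pair: the block map is
   sorted once with

     qsort (&VARRAY_TREE (map->block_map, 0),
            map->block_map->elements_used, sizeof (tree), compare_blocks);

   after which the remapped equivalent of an original block can be
   looked up with

     bsearch (original_block, &VARRAY_TREE (map->block_map, 0),
              map->block_map->elements_used, sizeof (tree), find_block);

   as done in expand_inline_function and copy_insn_list below
   (original_block standing in for the NOTE_BLOCK being remapped).  */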

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
rtx
expand_inline_function (tree fndecl, tree parms, rtx target, int ignore,
                        tree type, rtx structure_value_addr)
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
               ? NEXT_INSN (inl_f->inl_last_parm_insn)
               : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
  rtvec arg_vector = inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (size_t) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (arg == error_mark_node
          || mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode
              && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
        return (rtx) (size_t) -1;
    }

  /* If there is a TARGET which is a readonly BLKmode MEM and DECL_RESULT
     is also a mem, we are going to lose the readonly on the stores, so don't
     inline.  */
  if (target != 0 && GET_CODE (target) == MEM && GET_MODE (target) == BLKmode
      && RTX_UNCHANGING_P (target) && DECL_RTL_SET_P (DECL_RESULT (fndecl))
      && GET_CODE (DECL_RTL (DECL_RESULT (fndecl))) == MEM)
    return (rtx) (size_t) -1;

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = xmalloc (nargs * sizeof (rtx));
  arg_trees = xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

          store_expr (arg, stack_slot, 0);
          arg_vals[i] = XEXP (stack_slot, 0);
          invisiref = 1;
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            {
              int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
              enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

              pmode = promote_mode (TREE_TYPE (formal), pmode,
                                    &unsignedp, 0);

              if (GET_MODE (loc) != pmode)
                abort ();

              /* The mode of LOC and ARG can differ if LOC was a variable
                 that had its mode promoted via PROMOTED_MODE.  */
              arg_vals[i] = convert_modes (pmode,
                                           TYPE_MODE (TREE_TYPE (arg)),
                                           expand_expr (arg, NULL_RTX, mode,
                                                        EXPAND_SUM),
                                           unsignedp);
            }
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      /* If the formal type was const but the actual was not, we might
         end up here with an rtx wrongly tagged unchanging in the caller's
         context.  Fix that.  */
      if (arg_vals[i] != 0
          && (GET_CODE (arg_vals[i]) == REG || GET_CODE (arg_vals[i]) == MEM)
          && ! TREE_READONLY (TREE_VALUE (actual)))
        RTX_UNCHANGING_P (arg_vals[i]) = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && ! invisiref
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
          && POINTER_TYPE_P (TREE_TYPE (formal)))
        mark_reg_pointer (arg_vals[i],
                          TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }

  /* Allocate the structures we use to remap things.  */

  map = xcalloc (1, sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map = xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
                           (max_reg_num ()
                            + (max_regno - FIRST_PSEUDO_REGISTER)
                            + 15 * nargs
                            + 10),
                           "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  /* If the inlined function calls __builtin_constant_p, then we'll
     need to call purge_builtin_constant_p on this function.  */
  if (inl_f->calls_constant_p)
    current_function_calls_constant_p = 1;

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note_copy (parm_insns);

      if (note)
        RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.  It
             will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             with the parameter now; we will call store_expr later.  In
             this case, however, we must ensure that the virtual stack and
             incoming arg rtx values are expanded now so that we can be
             sure we have enough slots in the const equiv map since the
             store_expr call can easily blow the size estimate.  */
          if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
            copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
        }
      else if (GET_CODE (loc) == REG)
        process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
        {
          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

          process_reg_param (map, locreal, copyreal);
          process_reg_param (map, locimag, copyimag);
        }
      else
        abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
          /* Exclude case handled above.  */
          && ! (GET_CODE (XEXP (loc, 0)) == REG
                && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
        {
          rtx note = emit_line_note (DECL_SOURCE_LOCATION (formal));

          if (note)
            RTX_INTEGRATED_P (note) = 1;

          /* Compute the address in the area we reserved and store the
             value there.  */
          temp = copy_rtx_and_substitute (loc, map, 1);
          subst_constants (&temp, NULL_RTX, map, 1);
          apply_change_group ();
          if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
            temp = change_address (temp, VOIDmode, XEXP (temp, 0));
          store_expr (arg_trees[i], temp, 0);
        }
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
         ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
        {
          temp = copy_rtx_and_substitute (loc, map, 1);
          subst_constants (&temp, NULL_RTX, map, 1);
          apply_change_group ();
          target = temp;
        }
      else
        {
          if (! structure_value_addr
              || ! aggregate_value_p (DECL_RESULT (fndecl), fndecl))
            abort ();

          /* Pass the function the address in which to return a structure
             value.  Note that a constructor can cause someone to call us
             with STRUCTURE_VALUE_ADDR, but the initialization takes place
             via the first parameter, rather than the struct return address.

             We have two cases: If the address is a simple register
             indirect, use the mapping mechanism to point that register to
             our structure return address.  Otherwise, store the structure
             return value into the place that it will be referenced from.  */

          if (GET_CODE (XEXP (loc, 0)) == REG)
            {
              temp = force_operand (structure_value_addr, NULL_RTX);
              temp = force_reg (Pmode, temp);
              /* A virtual register might be invalid in an insn, because
                 it can cause trouble in reload.  Since we don't have access
                 to the expanders at map translation time, make sure we have
                 a proper register now.
                 If a virtual register is actually valid, cse or combine
                 can put it into the mapped insns.  */
              if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
                  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
                temp = copy_to_mode_reg (Pmode, temp);
              map->reg_map[REGNO (XEXP (loc, 0))] = temp;

              if (CONSTANT_P (structure_value_addr)
                  || GET_CODE (structure_value_addr) == ADDRESSOF
                  || (GET_CODE (structure_value_addr) == PLUS
                      && (XEXP (structure_value_addr, 0)
                          == virtual_stack_vars_rtx)
                      && (GET_CODE (XEXP (structure_value_addr, 1))
                          == CONST_INT)))
                {
                  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
                                        CONST_AGE_PARM);
                }
            }
          else
            {
              temp = copy_rtx_and_substitute (loc, map, 1);
              subst_constants (&temp, NULL_RTX, map, 0);
              apply_change_group ();
              emit_move_insn (temp, structure_value_addr);
            }
        }
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
         value.  Set up our target for remapping.  */

      /* Machine mode the function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
         (for the sake of callers that fail to declare it right).
         We have to use the mode of the result's RTL, rather than
         its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        {
          /* Don't make BLKmode registers.  If this looks like
             a BLKmode object being returned in a register, get
             the mode from that, otherwise abort.  */
          if (departing_mode == BLKmode)
            {
              if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
                {
                  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
                  arriving_mode = departing_mode;
                }
              else
                abort ();
            }

          target = gen_reg_rtx (departing_mode);
        }

      /* If function's value was promoted before return,
         avoid machine mode mismatch when we substitute INLINE_TARGET.
         But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
        {
          /* Avoid creating a paradoxical subreg wider than
             BITS_PER_WORD, since that is illegal.  */
          if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
            {
              if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
                                          GET_MODE_BITSIZE (arriving_mode)))
                /* Maybe could be handled by using convert_move () ?  */
                abort ();
              reg_to_map = gen_reg_rtx (arriving_mode);
              target = gen_lowpart (departing_mode, reg_to_map);
            }
          else
            reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
        }
      else
        reg_to_map = target;

      /* Usually, the result value is the machine's return register.
         Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
        map->inline_target = reg_to_map;
      else
        map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else if (GET_CODE (loc) == CONCAT)
    {
      enum machine_mode departing_mode = TYPE_MODE (type);
      enum machine_mode arriving_mode
        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));

      if (departing_mode != arriving_mode)
        abort ();
      if (GET_CODE (XEXP (loc, 0)) != REG
          || GET_CODE (XEXP (loc, 1)) != REG)
        abort ();

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        target = gen_reg_rtx (departing_mode);

      if (GET_CODE (target) != CONCAT)
        abort ();

      map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
      map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
    }
  else
    abort ();

  /* Remap the exception handler data pointer from one to the other.  */
  temp = get_exception_pointer (inl_f);
  if (temp)
    map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset (&map->label_map[min_labelno], 0,
          (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
         map->block_map->elements_used,
         sizeof (tree),
         compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Map pseudos used for initial hard reg values.  */
  setup_initial_hard_reg_value_integration (inl_f, map);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode, because
       the superblocks have not been created yet.  */
    (*lang_hooks.decls.insert_block) (block);
  else
    {
      BLOCK_CHAIN (block)
        = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_location);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl)), fndecl))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
                            memory_address (TYPE_MODE (type),
                                            structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is nonzero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (rtx insns, struct inline_remap *map, rtx static_chain_value)
{
  int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtx static_chain_mem = 0;

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
        {
        case INSN:
          pattern = PATTERN (insn);
          set = single_set (insn);
          copy = 0;
          if (GET_CODE (pattern) == USE
              && GET_CODE (XEXP (pattern, 0)) == REG
              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            /* The (USE (REG n)) at return from the function should
               be ignored since we are changing (REG n) into
               inline_target.  */
            break;

          /* Ignore setting a function value that we don't want to use.  */
          if (map->inline_target == 0
              && set != 0
              && GET_CODE (SET_DEST (set)) == REG
              && REG_FUNCTION_VALUE_P (SET_DEST (set)))
            {
              if (volatile_refs_p (SET_SRC (set)))
                {
                  rtx new_set;

                  /* If we must not delete the source,
                     load it into a new temporary.  */
                  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

                  new_set = single_set (copy);
                  if (new_set == 0)
                    abort ();

                  SET_DEST (new_set)
                    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
                }
              /* If the source and destination are the same and it
                 has a note on it, keep the insn.  */
              else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
                       && REG_NOTES (insn) != 0)
                copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
              else
                break;
            }

          /* Similarly if an ignored return value is clobbered.  */
          else if (map->inline_target == 0
                   && GET_CODE (pattern) == CLOBBER
                   && GET_CODE (XEXP (pattern, 0)) == REG
                   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            break;

          /* Look for the address of the static chain slot.  The
             rtx_equal_p comparisons against the
             static_chain_incoming_rtx below may fail if the static
             chain is in memory and the address specified is not
             "legitimate".  This happens on Xtensa where the static
             chain is at a negative offset from argp and where only
             positive offsets are legitimate.  When the RTL is
             generated, the address is "legitimized" by copying it
             into a register, causing the rtx_equal_p comparisons to
             fail.  This workaround looks for code that sets a
             register to the address of the static chain.  Subsequent
             memory references via that register can then be
             identified as static chain references.  We assume that
             the register is only assigned once, and that the static
             chain address is only live in one register at a time.  */

          else if (static_chain_value != 0
                   && set != 0
                   && GET_CODE (static_chain_incoming_rtx) == MEM
                   && GET_CODE (SET_DEST (set)) == REG
                   && rtx_equal_p (SET_SRC (set),
                                   XEXP (static_chain_incoming_rtx, 0)))
            {
              static_chain_mem =
                gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
                             SET_DEST (set));

              /* Emit the instruction in case it is used for something
                 other than setting the static chain; if it's not used,
                 it can always be removed as dead code.  */
              copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
            }

          /* If this is setting the static chain rtx, omit it.  */
          else if (static_chain_value != 0
                   && set != 0
                   && (rtx_equal_p (SET_DEST (set),
                                    static_chain_incoming_rtx)
                       || (static_chain_mem
                           && rtx_equal_p (SET_DEST (set), static_chain_mem))))
            break;

          /* If this is setting the static chain pseudo, set it from
             the value we want to give it instead.  */
          else if (static_chain_value != 0
                   && set != 0
                   && (rtx_equal_p (SET_SRC (set),
                                    static_chain_incoming_rtx)
                       || (static_chain_mem
                           && rtx_equal_p (SET_SRC (set), static_chain_mem))))
            {
              rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

              copy = emit_move_insn (newdest, static_chain_value);
              if (GET_CODE (static_chain_incoming_rtx) != MEM)
                static_chain_value = 0;
            }

          /* If this is setting the virtual stack vars register, this must
             be the code at the handler for a builtin longjmp.  The value
             saved in the setjmp buffer will be the address of the frame
             we've made for this inlined instance within our frame.  But we
             know the offset of that value so we can use it to reconstruct
             our virtual stack vars register from that value.  If we are
             copying it from the stack pointer, leave it unchanged.  */
          else if (set != 0
                   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
            {
              HOST_WIDE_INT offset;
              temp = map->reg_map[REGNO (SET_DEST (set))];
              temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                         REGNO (temp)).rtx;

              if (rtx_equal_p (temp, virtual_stack_vars_rtx))
                offset = 0;
              else if (GET_CODE (temp) == PLUS
                       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
                       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
                offset = INTVAL (XEXP (temp, 1));
              else
                abort ();

              if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
                temp = SET_SRC (set);
              else
                temp = force_operand (plus_constant (SET_SRC (set),
                                                     - offset),
                                      NULL_RTX);

              copy = emit_move_insn (virtual_stack_vars_rtx, temp);
            }

          else
            copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
          /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
          /* If this insn is setting CC0, it may need to look at
             the insn that uses CC0 to see what type of insn it is.
             In that case, the call to recog via validate_change will
             fail.  So don't substitute constants here.  Instead,
             do it when we emit the following insn.

             For example, see the pyr.md file.  That machine has signed and
             unsigned compares.  The compare patterns must check the
             following branch insn to see which kind of compare to
             emit.

             If the previous insn set CC0, substitute constants on it as
             well.  */
          if (sets_cc0_p (PATTERN (copy)) != 0)
            cc0_insn = copy;
          else
            {
              if (cc0_insn)
                try_constants (cc0_insn, map);
              cc0_insn = 0;
              try_constants (copy, map);
            }
#else
          try_constants (copy, map);
#endif
          INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
          break;

        case JUMP_INSN:
          if (map->integrating && returnjump_p (insn))
            {
              if (map->local_return_label == 0)
                map->local_return_label = gen_label_rtx ();
              pattern = gen_jump (map->local_return_label);
            }
          else
            pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

          copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);
          INSN_LOCATOR (copy) = INSN_LOCATOR (insn);

          /* If this used to be a conditional jump insn whose branch
             direction is now known, we must do something special.  */
          if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
            {
#ifdef HAVE_cc0
              /* If the previous insn set cc0 for us, delete it.  */
              if (only_sets_cc0_p (PREV_INSN (copy)))
                delete_related_insns (PREV_INSN (copy));
#endif

              /* If this is now a no-op, delete it.  */
              if (map->last_pc_value == pc_rtx)
                {
                  delete_related_insns (copy);
                  copy = 0;
                }
              else
                /* Otherwise, this is an unconditional jump so we must put a
                   BARRIER after it.  We could do some dead code elimination
                   here, but jump.c will do it just as well.  */
                emit_barrier ();
            }
          break;

        case CALL_INSN:
          /* If this is a CALL_PLACEHOLDER insn then we need to copy the
             three attached sequences: normal call, sibling call and tail
             recursion.  */
          if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              rtx sequence[3];
              rtx tail_label;

              for (i = 0; i < 3; i++)
                {
                  rtx seq;

                  sequence[i] = NULL_RTX;
                  seq = XEXP (PATTERN (insn), i);
                  if (seq)
                    {
                      start_sequence ();
                      copy_insn_list (seq, map, static_chain_value);
                      sequence[i] = get_insns ();
                      end_sequence ();
                    }
                }

              /* Find the new tail recursion label.
                 It will already be substituted into sequence[2].  */
              tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
                                                    map, 0);

              copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
                                                               sequence[0],
                                                               sequence[1],
                                                               sequence[2],
                                                               tail_label));
              break;
            }

          pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
          copy = emit_call_insn (pattern);

          SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
          CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
          INSN_LOCATOR (copy) = INSN_LOCATOR (insn);

          /* Because the USAGE information potentially contains objects other
             than hard registers, we need to copy it.  */

          CALL_INSN_FUNCTION_USAGE (copy)
            = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
                                       map, 0);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
          break;

        case CODE_LABEL:
          copy = emit_label (get_label_from_map (map,
                                                 CODE_LABEL_NUMBER (insn)));
          LABEL_NAME (copy) = LABEL_NAME (insn);
          map->const_age++;
          break;

        case BARRIER:
          copy = emit_barrier ();
          break;
1637 case NOTE:
1638 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1640 copy = emit_label (get_label_from_map (map,
1641 CODE_LABEL_NUMBER (insn)));
1642 LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1643 map->const_age++;
1644 break;
1647 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1648 discarded because it is important to have only one of
1649 each in the current function.
1651 NOTE_INSN_DELETED notes aren't useful. */
1653 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1654 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1655 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1657 copy = emit_note_copy (insn);
1658 if (!copy)
1659 /* Copied a line note, but line numbering is off. */;
1660 else if ((NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1661 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1662 && NOTE_BLOCK (insn))
1664 tree *mapped_block_p;
1666 mapped_block_p
1667 = (tree *) bsearch (NOTE_BLOCK (insn),
1668 &VARRAY_TREE (map->block_map, 0),
1669 map->block_map->elements_used,
1670 sizeof (tree),
1671 find_block);
1673 if (!mapped_block_p)
1674 abort ();
1675 else
1676 NOTE_BLOCK (copy) = *mapped_block_p;
1678 else if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1679 NOTE_EXPECTED_VALUE (copy)
1680 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1681 map, 0);
1683 else
1684 copy = 0;
1685 break;
1687 default:
1688 abort ();
1691 if (copy)
1692 RTX_INTEGRATED_P (copy) = 1;
1694 map->insn_map[INSN_UID (insn)] = copy;
1698 /* Copy the REG_NOTES. Increment const_age, so that only constants
1699 from parameters can be substituted in. These are the only ones
1700 that are valid across the entire function. */
1702 static void
1703 copy_insn_notes (rtx insns, struct inline_remap *map, int eh_region_offset)
1705 rtx insn, new_insn;
1707 map->const_age++;
1708 for (insn = insns; insn; insn = NEXT_INSN (insn))
1710 if (! INSN_P (insn))
1711 continue;
1713 new_insn = map->insn_map[INSN_UID (insn)];
1714 if (! new_insn)
1715 continue;
1717 if (REG_NOTES (insn))
1719 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1721 /* We must also do subst_constants, in case one of our parameters
1722 has const type and constant value. */
1723 subst_constants (&note, NULL_RTX, map, 0);
1724 apply_change_group ();
1725 REG_NOTES (new_insn) = note;
1727 /* Delete any REG_LABEL notes from the chain. Remap any
1728 REG_EH_REGION notes. */
1729 for (; note; note = next)
1731 next = XEXP (note, 1);
1732 if (REG_NOTE_KIND (note) == REG_LABEL)
1733 remove_note (new_insn, note);
1734 else if (REG_NOTE_KIND (note) == REG_EH_REGION
1735 && INTVAL (XEXP (note, 0)) > 0)
1736 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1737 + eh_region_offset);
1741 if (GET_CODE (insn) == CALL_INSN
1742 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1744 int i;
1745 for (i = 0; i < 3; i++)
1746 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1749 if (GET_CODE (insn) == JUMP_INSN
1750 && GET_CODE (PATTERN (insn)) == RESX)
1751 XINT (PATTERN (new_insn), 0) += eh_region_offset;
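/* Illustrative example (assumed numbers, not from the sources): if an insn
   in the inlined body carried (REG_EH_REGION 2) and the inlined function's
   exception regions were renumbered with an EH_REGION_OFFSET of 10, the
   copied note above becomes (REG_EH_REGION 12). Only positive region
   numbers are remapped; zero and negative values are left untouched. */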
1755 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1756 push all of those decls and give each one the corresponding home. */
1758 static void
1759 integrate_parm_decls (tree args, struct inline_remap *map, rtvec arg_vector)
1761 tree tail;
1762 int i;
1764 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1766 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1767 current_function_decl);
1768 rtx new_decl_rtl
1769 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1771 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1772 here, but that's going to require some more work. */
1773 /* DECL_INCOMING_RTL (decl) = ?; */
1774 /* Fully instantiate the address with the equivalent form so that the
1775 debugging information contains the actual register, instead of the
1776 virtual register. Do this by not passing an insn to
1777 subst_constants. */
1778 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1779 apply_change_group ();
1780 SET_DECL_RTL (decl, new_decl_rtl);
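/* A hypothetical example of the remapping done above: when inlining
   `int f (int x)' where the incoming X was homed in (reg:SI 58), the new
   VAR_DECL made for X in the caller gets a remapped home such as
   (reg:SI 134), fully instantiated so the debug output names the real
   register rather than a virtual one. */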
1784 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1785 current function a tree of contexts isomorphic to the one that is given.
1787 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1788 registers used in the DECL_RTL field should be remapped. If it is zero,
1789 no mapping is necessary. */
1791 static tree
1792 integrate_decl_tree (tree let, struct inline_remap *map)
1794 tree t;
1795 tree new_block;
1796 tree *next;
1798 new_block = make_node (BLOCK);
1799 VARRAY_PUSH_TREE (map->block_map, new_block);
1800 next = &BLOCK_VARS (new_block);
1802 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1804 tree d;
1806 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1808 if (DECL_RTL_SET_P (t))
1810 rtx r;
1812 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1814 /* Fully instantiate the address with the equivalent form so that the
1815 debugging information contains the actual register, instead of the
1816 virtual register. Do this by not passing an insn to
1817 subst_constants. */
1818 r = DECL_RTL (d);
1819 subst_constants (&r, NULL_RTX, map, 1);
1820 SET_DECL_RTL (d, r);
1822 apply_change_group ();
1825 /* Add this declaration to the list of variables in the new
1826 block. */
1827 *next = d;
1828 next = &TREE_CHAIN (d);
1831 next = &BLOCK_SUBBLOCKS (new_block);
1832 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1834 *next = integrate_decl_tree (t, map);
1835 BLOCK_SUPERCONTEXT (*next) = new_block;
1836 next = &BLOCK_CHAIN (*next);
1839 TREE_USED (new_block) = TREE_USED (let);
1840 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1842 return new_block;
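/* A minimal sketch of the resulting structure (assumed shapes, not real
   compiler output): inlining a function whose outermost BLOCK holds a
   variable A and one sub-BLOCK holding B yields an isomorphic tree

	(BLOCK vars: A' subblocks: (BLOCK vars: B'))

   in the caller, where A' and B' are the copies made by
   copy_decl_for_inlining and each new block records the original block as
   its BLOCK_ABSTRACT_ORIGIN. */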
1845 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1846 except for those few rtx codes that are sharable.
1848 We always return an rtx that is similar to that incoming rtx, with the
1849 exception of possibly changing a REG to a SUBREG or vice versa. No
1850 rtl is ever emitted.
1852 If FOR_LHS is nonzero, it means we are processing something that will
1853 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even when
1854 inlining, since we need to be conservative in how it is set for
1855 such cases.
1857 Handle constants that need to be placed in the constant pool by
1858 calling `force_const_mem'. */
1860 rtx
1861 copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
1863 rtx copy, temp;
1864 int i, j;
1865 RTX_CODE code;
1866 enum machine_mode mode;
1867 const char *format_ptr;
1868 int regno;
1870 if (orig == 0)
1871 return 0;
1873 code = GET_CODE (orig);
1874 mode = GET_MODE (orig);
1876 switch (code)
1878 case REG:
1879 /* If the stack pointer register shows up, it must be part of
1880 stack-adjustments (*not* because we eliminated the frame pointer!).
1881 Small hard registers are returned as-is. Pseudo-registers
1882 go through their `reg_map'. */
1883 regno = REGNO (orig);
1884 if (regno <= LAST_VIRTUAL_REGISTER
1885 || (map->integrating
1886 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1888 /* Some hard registers are also mapped,
1889 but others are not translated. */
1890 if (map->reg_map[regno] != 0)
1891 return map->reg_map[regno];
1893 /* If this is the virtual frame pointer, make space in current
1894 function's stack frame for the stack frame of the inline function.
1896 Copy the address of this area into a pseudo. Map
1897 virtual_stack_vars_rtx to this pseudo and set up a constant
1898 equivalence for it to be the address. This will substitute the
1899 address into insns where it can be substituted and use the new
1900 pseudo where it can't. */
1901 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1903 rtx loc, seq;
1904 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1905 #ifdef FRAME_GROWS_DOWNWARD
1906 int alignment
1907 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1908 / BITS_PER_UNIT);
1910 /* In this case, virtual_stack_vars_rtx points to one byte
1911 higher than the top of the frame area. So make sure we
1912 allocate a big enough chunk to keep the frame pointer
1913 aligned like a real one. */
1914 if (alignment)
1915 size = CEIL_ROUND (size, alignment);
1916 #endif
1917 start_sequence ();
1918 loc = assign_stack_temp (BLKmode, size, 1);
1919 loc = XEXP (loc, 0);
1920 #ifdef FRAME_GROWS_DOWNWARD
1921 /* In this case, virtual_stack_vars_rtx points to one byte
1922 higher than the top of the frame area. So compute the offset
1923 to one byte higher than our substitute frame. */
1924 loc = plus_constant (loc, size);
1925 #endif
1926 map->reg_map[regno] = temp
1927 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1929 #ifdef STACK_BOUNDARY
1930 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1931 #endif
1933 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1935 seq = get_insns ();
1936 end_sequence ();
1937 emit_insn_after (seq, map->insns_at_start);
1938 return temp;
1940 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1941 || (map->integrating
1942 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1943 == orig)))
1945 /* Do the same for a block to contain any arguments referenced
1946 in memory. */
1947 rtx loc, seq;
1948 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1950 start_sequence ();
1951 loc = assign_stack_temp (BLKmode, size, 1);
1952 loc = XEXP (loc, 0);
1953 /* When arguments grow downward, the virtual incoming
1954 args pointer points to the top of the argument block,
1955 so the remapped location better do the same. */
1956 #ifdef ARGS_GROW_DOWNWARD
1957 loc = plus_constant (loc, size);
1958 #endif
1959 map->reg_map[regno] = temp
1960 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1962 #ifdef STACK_BOUNDARY
1963 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1964 #endif
1966 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1968 seq = get_insns ();
1969 end_sequence ();
1970 emit_insn_after (seq, map->insns_at_start);
1971 return temp;
1973 else if (REG_FUNCTION_VALUE_P (orig))
1975 /* This is a reference to the function return value. If
1976 the function doesn't have a return value, error. If the
1977 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
1978 if (map->inline_target == 0)
1980 if (rtx_equal_function_value_matters)
1981 /* This is an ignored return value. We must not
1982 leave it in with REG_FUNCTION_VALUE_P set, since
1983 that would confuse subsequent inlining of the
1984 current function into a later function. */
1985 return gen_rtx_REG (GET_MODE (orig), regno);
1986 else
1987 /* Must be unrolling loops or replicating code if we
1988 reach here, so return the register unchanged. */
1989 return orig;
1991 else if (GET_MODE (map->inline_target) != BLKmode
1992 && mode != GET_MODE (map->inline_target))
1993 return gen_lowpart (mode, map->inline_target);
1994 else
1995 return map->inline_target;
1997 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
1998 /* If leaf_renumber_regs_insn() might remap this register to
1999 some other number, make sure we don't share it with the
2000 inlined function, otherwise delayed optimization of the
2001 inlined function may change it in place, breaking our
2002 reference to it. We may still share it within the
2003 function, so create an entry for this register in the
2004 reg_map. */
2005 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2006 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2008 if (!map->leaf_reg_map[regno][mode])
2009 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2010 return map->leaf_reg_map[regno][mode];
2012 #endif
2013 else
2014 return orig;
2016 abort ();
2018 if (map->reg_map[regno] == NULL)
2020 map->reg_map[regno] = gen_reg_rtx (mode);
2021 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2022 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2023 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2024 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2026 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2027 mark_reg_pointer (map->reg_map[regno],
2028 map->regno_pointer_align[regno]);
2030 return map->reg_map[regno];
2032 case SUBREG:
2033 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2034 return simplify_gen_subreg (GET_MODE (orig), copy,
2035 GET_MODE (SUBREG_REG (orig)),
2036 SUBREG_BYTE (orig));
2038 case ADDRESSOF:
2039 copy = gen_rtx_ADDRESSOF (mode,
2040 copy_rtx_and_substitute (XEXP (orig, 0),
2041 map, for_lhs),
2042 0, ADDRESSOF_DECL (orig));
2043 regno = ADDRESSOF_REGNO (orig);
2044 if (map->reg_map[regno])
2045 regno = REGNO (map->reg_map[regno]);
2046 else if (regno > LAST_VIRTUAL_REGISTER)
2048 temp = XEXP (orig, 0);
2049 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2050 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2051 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2052 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2053 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2055 /* Objects may initially be represented as registers, but
2056 may be turned into a MEM if their address is taken by
2057 put_var_into_stack. Therefore, the register table may have
2058 entries which are MEMs.
2060 We briefly tried to clear such entries, but that ended up
2061 cascading into many changes due to the optimizers not being
2062 prepared for empty entries in the register table. So we've
2063 decided to allow the MEMs in the register table for now. */
2064 if (REG_P (map->x_regno_reg_rtx[regno])
2065 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2066 mark_reg_pointer (map->reg_map[regno],
2067 map->regno_pointer_align[regno]);
2068 regno = REGNO (map->reg_map[regno]);
2070 ADDRESSOF_REGNO (copy) = regno;
2071 return copy;
2073 case USE:
2074 case CLOBBER:
2075 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2076 to (use foo) if the original insn didn't have a subreg.
2077 Removing the subreg distorts the VAX movstrhi pattern
2078 by changing the mode of an operand. */
2079 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2080 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2081 copy = SUBREG_REG (copy);
2082 return gen_rtx_fmt_e (code, VOIDmode, copy);
2084 /* We need to handle "deleted" labels that appear in the DECL_RTL
2085 of a LABEL_DECL. */
2086 case NOTE:
2087 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2088 break;
2090 /* ... FALLTHRU ... */
2091 case CODE_LABEL:
2092 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2093 = LABEL_PRESERVE_P (orig);
2094 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2096 case LABEL_REF:
2097 copy
2098 = gen_rtx_LABEL_REF
2099 (mode,
2100 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2101 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2103 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2105 /* The fact that this label was previously nonlocal does not mean
2106 it still is, so we must check if it is within the range of
2107 this function's labels. */
2108 LABEL_REF_NONLOCAL_P (copy)
2109 = (LABEL_REF_NONLOCAL_P (orig)
2110 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2111 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2113 /* If we have made a nonlocal label local, it means that this
2114 inlined call will be referring to our nonlocal goto handler.
2115 So make sure we create one for this block; we normally would
2116 not since this is not otherwise considered a "call". */
2117 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2118 function_call_count++;
2120 return copy;
2122 case PC:
2123 case CC0:
2124 case CONST_INT:
2125 case CONST_VECTOR:
2126 return orig;
2128 case SYMBOL_REF:
2129 /* Symbols which represent the address of a label stored in the constant
2130 pool must be modified to point to a constant pool entry for the
2131 remapped label. Otherwise, symbols are returned unchanged. */
2132 if (CONSTANT_POOL_ADDRESS_P (orig))
2134 struct function *f = inlining ? inlining : cfun;
2135 rtx constant = get_pool_constant_for_function (f, orig);
2136 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2137 if (inlining)
2139 rtx temp = force_const_mem (const_mode,
2140 copy_rtx_and_substitute (constant,
2141 map, 0));
2143 #if 0
2144 /* Legitimizing the address here is incorrect.
2146 Since we had a SYMBOL_REF before, we can assume it is valid
2147 to have one in this position in the insn.
2149 Also, change_address may create new registers. These
2150 registers will not have valid reg_map entries. This can
2151 cause try_constants() to fail because it assumes that all
2152 registers in the rtx have valid reg_map entries, and it may
2153 end up replacing one of these new registers with junk. */
2155 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2156 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2157 #endif
2159 temp = XEXP (temp, 0);
2160 temp = convert_memory_address (GET_MODE (orig), temp);
2161 return temp;
2163 else if (GET_CODE (constant) == LABEL_REF)
2164 return XEXP (force_const_mem
2165 (GET_MODE (orig),
2166 copy_rtx_and_substitute (constant, map, for_lhs)),
2169 else if (TREE_CONSTANT_POOL_ADDRESS_P (orig) && inlining)
2170 notice_rtl_inlining_of_deferred_constant ();
2172 return orig;
2174 case CONST_DOUBLE:
2175 /* We have to make a new copy of this CONST_DOUBLE because we don't
2176 want to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2177 duplicate of a CONST_DOUBLE we have already seen. */
2178 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2180 REAL_VALUE_TYPE d;
2182 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2183 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2185 else
2186 return immed_double_const (CONST_DOUBLE_LOW (orig),
2187 CONST_DOUBLE_HIGH (orig), VOIDmode);
2189 case CONST:
2190 /* Make new constant pool entry for a constant
2191 that was in the pool of the inline function. */
2192 if (RTX_INTEGRATED_P (orig))
2193 abort ();
2194 break;
2196 case ASM_OPERANDS:
2197 /* If a single asm insn contains multiple output operands then
2198 it contains multiple ASM_OPERANDS rtx's that share the input
2199 and constraint vecs. We must make sure that the copied insn
2200 continues to share it. */
2201 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2203 copy = rtx_alloc (ASM_OPERANDS);
2204 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2205 PUT_MODE (copy, GET_MODE (orig));
2206 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2207 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2208 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2209 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2210 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2211 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2212 = map->copy_asm_constraints_vector;
2213 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2214 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2215 return copy;
2217 break;
2219 case CALL:
2220 /* This is given special treatment because the first
2221 operand of a CALL is a (MEM ...) which may get
2222 forced into a register for cse. This is undesirable
2223 if function-address cse isn't wanted or if we won't do cse. */
2224 #ifndef NO_FUNCTION_CSE
2225 if (! (optimize && ! flag_no_function_cse))
2226 #endif
2228 rtx copy
2229 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2230 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2231 map, 0));
2233 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2235 return
2236 gen_rtx_CALL (GET_MODE (orig), copy,
2237 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2239 break;
2241 #if 0
2242 /* Must be ifdefed out for loop unrolling to work. */
2243 case RETURN:
2244 abort ();
2245 #endif
2247 case SET:
2248 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2249 Adjust the setting by the offset of the area we made.
2250 If the nonlocal goto is into the current function,
2251 this will result in unnecessarily bad code, but should work. */
2252 if (SET_DEST (orig) == virtual_stack_vars_rtx
2253 || SET_DEST (orig) == virtual_incoming_args_rtx)
2255 /* In case a translation hasn't occurred already, make one now. */
2256 rtx equiv_reg;
2257 rtx equiv_loc;
2258 HOST_WIDE_INT loc_offset;
2260 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2261 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2262 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2263 REGNO (equiv_reg)).rtx;
2264 loc_offset
2265 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2267 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2268 force_operand
2269 (plus_constant
2270 (copy_rtx_and_substitute (SET_SRC (orig),
2271 map, 0),
2272 - loc_offset),
2273 NULL_RTX));
2275 else
2276 return gen_rtx_SET (VOIDmode,
2277 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2278 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2279 break;
2281 case MEM:
2282 if (inlining
2283 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2284 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2286 enum machine_mode const_mode
2287 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2288 rtx constant
2289 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2291 constant = copy_rtx_and_substitute (constant, map, 0);
2293 /* If this was an address of a constant pool entry that itself
2294 had to be placed in the constant pool, it might not be a
2295 valid address. So the recursive call might have turned it
2296 into a register. In that case, it isn't a constant any
2297 more, so return it. This has the potential of changing a
2298 MEM into a REG, but we'll assume that it is safe. */
2299 if (! CONSTANT_P (constant))
2300 return constant;
2302 return validize_mem (force_const_mem (const_mode, constant));
2305 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2306 map, 0));
2307 MEM_COPY_ATTRIBUTES (copy, orig);
2309 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2310 since this may be an indirect reference to a parameter and the
2311 actual may not be readonly. */
2312 if (inlining && !for_lhs)
2313 RTX_UNCHANGING_P (copy) = 0;
2315 /* If inlining, squish aliasing data that references the subroutine's
2316 parameter list, since that's no longer applicable. */
2317 if (inlining && MEM_EXPR (copy)
2318 && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2319 && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2320 set_mem_expr (copy, NULL_TREE);
2322 return copy;
2324 default:
2325 break;
2328 copy = rtx_alloc (code);
2329 PUT_MODE (copy, mode);
2330 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2331 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2332 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2334 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2336 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2338 switch (*format_ptr++)
2340 case '0':
2341 X0ANY (copy, i) = X0ANY (orig, i);
2342 break;
2344 case 'e':
2345 XEXP (copy, i)
2346 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2347 break;
2349 case 'u':
2350 /* Change any references to old-insns to point to the
2351 corresponding copied insns. */
2352 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2353 break;
2355 case 'E':
2356 XVEC (copy, i) = XVEC (orig, i);
2357 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2359 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2360 for (j = 0; j < XVECLEN (copy, i); j++)
2361 XVECEXP (copy, i, j)
2362 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2363 map, for_lhs);
2365 break;
2367 case 'w':
2368 XWINT (copy, i) = XWINT (orig, i);
2369 break;
2371 case 'i':
2372 XINT (copy, i) = XINT (orig, i);
2373 break;
2375 case 's':
2376 XSTR (copy, i) = XSTR (orig, i);
2377 break;
2379 case 't':
2380 XTREE (copy, i) = XTREE (orig, i);
2381 break;
2383 default:
2384 abort ();
2388 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2390 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2391 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2392 map->copy_asm_constraints_vector
2393 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2396 return copy;
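#if 0
/* A minimal usage sketch, kept ifdefed out like the other inactive code in
   this file.  It assumes MAP is a fully initialized inline_remap (as built
   during function integration) and simply mirrors the SET case above:
   destinations are copied with FOR_LHS nonzero, sources with FOR_LHS zero.  */
static rtx
copy_set_for_inlining (rtx set, struct inline_remap *map)
{
  return gen_rtx_SET (VOIDmode,
		      copy_rtx_and_substitute (SET_DEST (set), map, 1),
		      copy_rtx_and_substitute (SET_SRC (set), map, 0));
}
#endif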
2399 /* Substitute known constant values into INSN, if that is valid. */
2401 void
2402 try_constants (rtx insn, struct inline_remap *map)
2404 int i;
2406 map->num_sets = 0;
2408 /* First try just updating addresses, then other things. This is
2409 important when we have something like the store of a constant
2410 into memory and we can update the memory address but the machine
2411 does not support a constant source. */
2412 subst_constants (&PATTERN (insn), insn, map, 1);
2413 apply_change_group ();
2414 subst_constants (&PATTERN (insn), insn, map, 0);
2415 apply_change_group ();
2417 /* Enforce consistency between the addresses in the regular insn flow
2418 and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
2419 if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
2421 subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
2422 apply_change_group ();
2425 /* Show we don't know the value of anything stored or clobbered. */
2426 note_stores (PATTERN (insn), mark_stores, NULL);
2427 map->last_pc_value = 0;
2428 #ifdef HAVE_cc0
2429 map->last_cc0_value = 0;
2430 #endif
2432 /* Set up any constant equivalences made in this insn. */
2433 for (i = 0; i < map->num_sets; i++)
2435 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2437 int regno = REGNO (map->equiv_sets[i].dest);
2439 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2440 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2441 /* The following clause is a hack to make the case work where GNU C++
2442 reassigns a variable to make cse work right. */
2443 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2444 regno).rtx,
2445 map->equiv_sets[i].equiv))
2446 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2447 map->equiv_sets[i].equiv, map->const_age);
2449 else if (map->equiv_sets[i].dest == pc_rtx)
2450 map->last_pc_value = map->equiv_sets[i].equiv;
2451 #ifdef HAVE_cc0
2452 else if (map->equiv_sets[i].dest == cc0_rtx)
2453 map->last_cc0_value = map->equiv_sets[i].equiv;
2454 #endif
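/* A worked example with hypothetical pseudo numbers: if the copied insn is
   (set (reg:SI 130) (const_int 42)), subst_constants records the pair in
   map->equiv_sets, and the loop above then enters 42 as the constant
   equivalent of pseudo 130 at the current const_age, so (reg:SI 130) in
   later copied insns may be replaced by (const_int 42). */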
2458 /* Substitute known constants for pseudo regs in the contents of LOC,
2459 which are part of INSN.
2460 If INSN is zero, the substitution should always be done (this is used to
2461 update DECL_RTL).
2462 These changes are taken out by try_constants if the result is not valid.
2464 Note that we are more concerned with determining when the result of a SET
2465 is a constant, for further propagation, than actually inserting constants
2466 into insns; cse will do the latter task better.
2468 This function is also used to adjust address of items previously addressed
2469 via the virtual stack variable or virtual incoming arguments registers.
2471 If MEMONLY is nonzero, only make changes inside a MEM. */
2473 static void
2474 subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
2476 rtx x = *loc;
2477 int i, j;
2478 enum rtx_code code;
2479 const char *format_ptr;
2480 int num_changes = num_validated_changes ();
2481 rtx new = 0;
2482 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2484 code = GET_CODE (x);
2486 switch (code)
2488 case PC:
2489 case CONST_INT:
2490 case CONST_DOUBLE:
2491 case CONST_VECTOR:
2492 case SYMBOL_REF:
2493 case CONST:
2494 case LABEL_REF:
2495 case ADDRESS:
2496 return;
2498 #ifdef HAVE_cc0
2499 case CC0:
2500 if (! memonly)
2501 validate_change (insn, loc, map->last_cc0_value, 1);
2502 return;
2503 #endif
2505 case USE:
2506 case CLOBBER:
2507 /* The only thing we can do with a USE or CLOBBER is possibly do
2508 some substitutions in a MEM within it. */
2509 if (GET_CODE (XEXP (x, 0)) == MEM)
2510 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2511 return;
2513 case REG:
2514 /* Substitute for parms and known constants. Don't replace
2515 hard regs used as user variables with constants. */
2516 if (! memonly)
2518 int regno = REGNO (x);
2519 struct const_equiv_data *p;
2521 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2522 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2523 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2524 p->rtx != 0)
2525 && p->age >= map->const_age)
2526 validate_change (insn, loc, p->rtx, 1);
2528 return;
2530 case SUBREG:
2531 /* SUBREG applied to something other than a reg
2532 should be treated as ordinary, since that must
2533 be a special hack and we don't know how to treat it specially.
2534 Consider for example mulsidi3 in m68k.md.
2535 Ordinary SUBREG of a REG needs this special treatment. */
2536 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2538 rtx inner = SUBREG_REG (x);
2539 rtx new = 0;
2541 /* We can't call subst_constants on &SUBREG_REG (x) because any
2542 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2543 see what is inside, try to form the new SUBREG and see if that is
2544 valid. We handle two cases: extracting a full word in an
2545 integral mode and extracting the low part. */
2546 subst_constants (&inner, NULL_RTX, map, 0);
2547 new = simplify_gen_subreg (GET_MODE (x), inner,
2548 GET_MODE (SUBREG_REG (x)),
2549 SUBREG_BYTE (x));
2551 if (new)
2552 validate_change (insn, loc, new, 1);
2553 else
2554 cancel_changes (num_changes);
2556 return;
2558 break;
2560 case MEM:
2561 subst_constants (&XEXP (x, 0), insn, map, 0);
2563 /* If a memory address got spoiled, change it back. */
2564 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2565 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2566 cancel_changes (num_changes);
2567 return;
2569 case SET:
2571 /* Substitute constants in our source, and in any arguments to a
2572 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2573 itself. */
2574 rtx *dest_loc = &SET_DEST (x);
2575 rtx dest = *dest_loc;
2576 rtx src, tem;
2577 enum machine_mode compare_mode = VOIDmode;
2579 /* If SET_SRC is a COMPARE which subst_constants would turn into
2580 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2581 is to be done. */
2582 if (GET_CODE (SET_SRC (x)) == COMPARE)
2584 src = SET_SRC (x);
2585 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2586 || CC0_P (dest))
2588 compare_mode = GET_MODE (XEXP (src, 0));
2589 if (compare_mode == VOIDmode)
2590 compare_mode = GET_MODE (XEXP (src, 1));
2594 subst_constants (&SET_SRC (x), insn, map, memonly);
2595 src = SET_SRC (x);
2597 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2598 || GET_CODE (*dest_loc) == SUBREG
2599 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2601 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2603 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2604 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2606 dest_loc = &XEXP (*dest_loc, 0);
2609 /* Do substitute in the address of a destination in memory. */
2610 if (GET_CODE (*dest_loc) == MEM)
2611 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2613 /* Check for the case where DEST is a SUBREG, both it and the underlying
2614 register are no wider than one word, and the SUBREG has the wider mode.
2615 In that case, we are really setting the underlying register to the
2616 source converted to the mode of DEST. So indicate that. */
2617 if (GET_CODE (dest) == SUBREG
2618 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2619 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2620 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2621 <= GET_MODE_SIZE (GET_MODE (dest)))
2622 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2623 src)))
2624 src = tem, dest = SUBREG_REG (dest);
2626 /* If storing a recognizable value, save it for later recording. */
2627 if ((map->num_sets < MAX_RECOG_OPERANDS)
2628 && (CONSTANT_P (src)
2629 || (GET_CODE (src) == REG
2630 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2631 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2632 || (GET_CODE (src) == PLUS
2633 && GET_CODE (XEXP (src, 0)) == REG
2634 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2635 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2636 && CONSTANT_P (XEXP (src, 1)))
2637 || GET_CODE (src) == COMPARE
2638 || CC0_P (dest)
2639 || (dest == pc_rtx
2640 && (src == pc_rtx || GET_CODE (src) == RETURN
2641 || GET_CODE (src) == LABEL_REF))))
2643 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2644 it will cause us to save the COMPARE with any constants
2645 substituted, which is what we want for later. */
2646 rtx src_copy = copy_rtx (src);
2647 map->equiv_sets[map->num_sets].equiv = src_copy;
2648 map->equiv_sets[map->num_sets++].dest = dest;
2649 if (compare_mode != VOIDmode
2650 && GET_CODE (src) == COMPARE
2651 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2652 || CC0_P (dest))
2653 && GET_MODE (XEXP (src, 0)) == VOIDmode
2654 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2656 map->compare_src = src_copy;
2657 map->compare_mode = compare_mode;
2661 return;
2663 default:
2664 break;
2667 format_ptr = GET_RTX_FORMAT (code);
2669 /* If the first operand is an expression, save its mode for later. */
2670 if (*format_ptr == 'e')
2671 op0_mode = GET_MODE (XEXP (x, 0));
2673 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2675 switch (*format_ptr++)
2677 case '0':
2678 break;
2680 case 'e':
2681 if (XEXP (x, i))
2682 subst_constants (&XEXP (x, i), insn, map, memonly);
2683 break;
2685 case 'u':
2686 case 'i':
2687 case 's':
2688 case 'w':
2689 case 'n':
2690 case 't':
2691 case 'B':
2692 break;
2694 case 'E':
2695 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2696 for (j = 0; j < XVECLEN (x, i); j++)
2697 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2699 break;
2701 default:
2702 abort ();
2706 /* If this is a commutative operation, move a constant to the second
2707 operand unless the second operand is already a CONST_INT. */
2708 if (! memonly
2709 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2710 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2712 rtx tem = XEXP (x, 0);
2713 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2714 validate_change (insn, &XEXP (x, 1), tem, 1);
2717 /* Simplify the expression in case we put in some constants. */
2718 if (! memonly)
2719 switch (GET_RTX_CLASS (code))
2721 case '1':
2722 if (op0_mode == MAX_MACHINE_MODE)
2723 abort ();
2724 new = simplify_unary_operation (code, GET_MODE (x),
2725 XEXP (x, 0), op0_mode);
2726 break;
2728 case '<':
2730 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2732 if (op_mode == VOIDmode)
2733 op_mode = GET_MODE (XEXP (x, 1));
2734 new = simplify_relational_operation (code, op_mode,
2735 XEXP (x, 0), XEXP (x, 1));
2736 #ifdef FLOAT_STORE_FLAG_VALUE
2737 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2739 enum machine_mode mode = GET_MODE (x);
2740 if (new == const0_rtx)
2741 new = CONST0_RTX (mode);
2742 else
2744 REAL_VALUE_TYPE val;
2746 /* Avoid automatic aggregate initialization. */
2747 val = FLOAT_STORE_FLAG_VALUE (mode);
2748 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2751 #endif
2752 break;
2755 case '2':
2756 case 'c':
2757 new = simplify_binary_operation (code, GET_MODE (x),
2758 XEXP (x, 0), XEXP (x, 1));
2759 break;
2761 case 'b':
2762 case '3':
2763 if (op0_mode == MAX_MACHINE_MODE)
2764 abort ();
2766 if (code == IF_THEN_ELSE)
2768 rtx op0 = XEXP (x, 0);
2770 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2771 && GET_MODE (op0) == VOIDmode
2772 && ! side_effects_p (op0)
2773 && XEXP (op0, 0) == map->compare_src
2774 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2776 /* We have a compare of two VOIDmode constants for which
2777 we recorded the comparison mode. */
2778 rtx temp =
2779 simplify_relational_operation (GET_CODE (op0),
2780 map->compare_mode,
2781 XEXP (op0, 0),
2782 XEXP (op0, 1));
2784 if (temp == const0_rtx)
2785 new = XEXP (x, 2);
2786 else if (temp == const1_rtx)
2787 new = XEXP (x, 1);
2790 if (!new)
2791 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2792 XEXP (x, 0), XEXP (x, 1),
2793 XEXP (x, 2));
2794 break;
2797 if (new)
2798 validate_change (insn, loc, new, 1);
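/* A worked example with hypothetical pseudo numbers: if pseudo 131 has the
   recorded equivalent (const_int 4), then (plus:SI (reg:SI 131)
   (const_int 3)) becomes (plus:SI (const_int 4) (const_int 3)) after
   substitution, and simplify_binary_operation folds it to (const_int 7),
   which is proposed via validate_change.  If only the first operand had
   become constant, the canonicalization above would first move it to the
   second position. */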
2801 /* Show that registers modified no longer contain known constants. We are
2802 called from note_stores with parts of the new insn. */
2804 static void
2805 mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
2807 int regno = -1;
2808 enum machine_mode mode = VOIDmode;
2810 /* DEST is always the innermost thing set, except in the case of
2811 SUBREGs of hard registers. */
2813 if (GET_CODE (dest) == REG)
2814 regno = REGNO (dest), mode = GET_MODE (dest);
2815 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2817 regno = REGNO (SUBREG_REG (dest));
2818 if (regno < FIRST_PSEUDO_REGISTER)
2819 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2820 GET_MODE (SUBREG_REG (dest)),
2821 SUBREG_BYTE (dest),
2822 GET_MODE (dest));
2823 mode = GET_MODE (SUBREG_REG (dest));
2826 if (regno >= 0)
2828 unsigned int uregno = regno;
2829 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2830 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2831 unsigned int i;
2833 /* Ignore virtual stack var or virtual arg register since those
2834 are handled separately. */
2835 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2836 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2837 for (i = uregno; i <= last_reg; i++)
2838 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2839 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
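/* For instance (assuming a target where SImode occupies one hard register
   and DImode two): a store to (reg:DI 0) invalidates the recorded
   equivalences of hard regs 0 and 1, whereas a store to a pseudo register
   invalidates only that pseudo's own entry in global_const_equiv_varray. */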
2843 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2844 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2845 that it points to the node itself, thus indicating that the node is its
2846 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2847 the given node is NULL, recursively descend the decl/block tree which
2848 it is the root of, and for each other ..._DECL or BLOCK node contained
2849 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2850 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2851 values to point to themselves. */
2853 static void
2854 set_block_origin_self (tree stmt)
2856 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2858 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2861 tree local_decl;
2863 for (local_decl = BLOCK_VARS (stmt);
2864 local_decl != NULL_TREE;
2865 local_decl = TREE_CHAIN (local_decl))
2866 set_decl_origin_self (local_decl); /* Potential recursion. */
2870 tree subblock;
2872 for (subblock = BLOCK_SUBBLOCKS (stmt);
2873 subblock != NULL_TREE;
2874 subblock = BLOCK_CHAIN (subblock))
2875 set_block_origin_self (subblock); /* Recurse. */
2880 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2881 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2882 node so that it points to the node itself, thus indicating that the
2883 node represents its own (abstract) origin. Additionally, if the
2884 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2885 the decl/block tree of which the given node is the root, and for
2886 each other ..._DECL or BLOCK node contained therein whose
2887 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2888 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2889 point to themselves. */
2891 void
2892 set_decl_origin_self (tree decl)
2894 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2896 DECL_ABSTRACT_ORIGIN (decl) = decl;
2897 if (TREE_CODE (decl) == FUNCTION_DECL)
2899 tree arg;
2901 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2902 DECL_ABSTRACT_ORIGIN (arg) = arg;
2903 if (DECL_INITIAL (decl) != NULL_TREE
2904 && DECL_INITIAL (decl) != error_mark_node)
2905 set_block_origin_self (DECL_INITIAL (decl));
2910 /* Given a pointer to some BLOCK node, and a boolean value to set the
2911 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2912 the given block, and for all local decls and all local sub-blocks
2913 (recursively) which are contained therein. */
2915 static void
2916 set_block_abstract_flags (tree stmt, int setting)
2918 tree local_decl;
2919 tree subblock;
2921 BLOCK_ABSTRACT (stmt) = setting;
2923 for (local_decl = BLOCK_VARS (stmt);
2924 local_decl != NULL_TREE;
2925 local_decl = TREE_CHAIN (local_decl))
2926 set_decl_abstract_flags (local_decl, setting);
2928 for (subblock = BLOCK_SUBBLOCKS (stmt);
2929 subblock != NULL_TREE;
2930 subblock = BLOCK_CHAIN (subblock))
2931 set_block_abstract_flags (subblock, setting);
2934 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2935 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2936 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2937 set the abstract flags for all of the parameters, local vars, local
2938 blocks and sub-blocks (recursively) to the same setting. */
2940 void
2941 set_decl_abstract_flags (tree decl, int setting)
2943 DECL_ABSTRACT (decl) = setting;
2944 if (TREE_CODE (decl) == FUNCTION_DECL)
2946 tree arg;
2948 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2949 DECL_ABSTRACT (arg) = setting;
2950 if (DECL_INITIAL (decl) != NULL_TREE
2951 && DECL_INITIAL (decl) != error_mark_node)
2952 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2956 /* Output the assembly language code for the function FNDECL
2957 from its DECL_SAVED_INSNS. Used for inline functions that are output
2958 at end of compilation instead of where they came in the source. */
2960 static GTY(()) struct function *old_cfun;
2962 void
2963 output_inline_function (tree fndecl)
2965 enum debug_info_type old_write_symbols = write_symbols;
2966 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
2967 struct function *f = DECL_SAVED_INSNS (fndecl);
2969 old_cfun = cfun;
2970 cfun = f;
2971 current_function_decl = fndecl;
2973 set_new_last_label_num (f->inl_max_label_num);
2975 /* We're not deferring this any longer. */
2976 DECL_DEFER_OUTPUT (fndecl) = 0;
2978 /* If requested, suppress debugging information. */
2979 if (f->no_debugging_symbols)
2981 write_symbols = NO_DEBUG;
2982 debug_hooks = &do_nothing_debug_hooks;
2985 /* Make sure warnings emitted by the optimizers (e.g. control reaches
2986 end of non-void function) are not wildly incorrect. */
2987 input_location = DECL_SOURCE_LOCATION (fndecl);
2989 /* Compile this function all the way down to assembly code. As a
2990 side effect this destroys the saved RTL representation, but
2991 that's okay, because we don't need to inline this anymore. */
2992 rest_of_compilation (fndecl);
2993 DECL_INLINE (fndecl) = 0;
2995 cfun = old_cfun;
2996 current_function_decl = old_cfun ? old_cfun->decl : 0;
2997 write_symbols = old_write_symbols;
2998 debug_hooks = old_debug_hooks;
3002 /* Functions to keep track of the values hard regs had at the start of
3003 the function. */
3005 rtx
3006 get_hard_reg_initial_reg (struct function *fun, rtx reg)
3008 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3009 int i;
3011 if (ivs == 0)
3012 return NULL_RTX;
3014 for (i = 0; i < ivs->num_entries; i++)
3015 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3016 return ivs->entries[i].hard_reg;
3018 return NULL_RTX;
3021 rtx
3022 has_func_hard_reg_initial_val (struct function *fun, rtx reg)
3024 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3025 int i;
3027 if (ivs == 0)
3028 return NULL_RTX;
3030 for (i = 0; i < ivs->num_entries; i++)
3031 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3032 return ivs->entries[i].pseudo;
3034 return NULL_RTX;
3037 rtx
3038 get_func_hard_reg_initial_val (struct function *fun, rtx reg)
3040 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3041 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3043 if (rv)
3044 return rv;
3046 if (ivs == 0)
3048 fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
3049 ivs = fun->hard_reg_initial_vals;
3050 ivs->num_entries = 0;
3051 ivs->max_entries = 5;
3052 ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
3055 if (ivs->num_entries >= ivs->max_entries)
3057 ivs->max_entries += 5;
3058 ivs->entries = ggc_realloc (ivs->entries,
3059 ivs->max_entries
3060 * sizeof (initial_value_pair));
3063 ivs->entries[ivs->num_entries].hard_reg = reg;
3064 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3066 return ivs->entries[ivs->num_entries++].pseudo;
3069 rtx
3070 get_hard_reg_initial_val (enum machine_mode mode, int regno)
3072 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3075 rtx
3076 has_hard_reg_initial_val (enum machine_mode mode, int regno)
3078 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
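#if 0
/* A usage sketch, ifdefed out and using an assumed hard register number: a
   backend needing the entry value of, say, hard reg 14 can obtain a pseudo
   guaranteed to hold that initial value; emit_initial_value_sets below
   emits the actual copies at the start of the function.  */
static rtx
example_entry_value (void)
{
  return get_hard_reg_initial_val (Pmode, 14);
}
#endif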
3081 static void
3082 setup_initial_hard_reg_value_integration (struct function *inl_f, struct inline_remap *remap)
3084 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3085 int i;
3087 if (ivs == 0)
3088 return;
3090 for (i = 0; i < ivs->num_entries; i ++)
3091 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3092 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3096 void
3097 emit_initial_value_sets (void)
3099 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3100 int i;
3101 rtx seq;
3103 if (ivs == 0)
3104 return;
3106 start_sequence ();
3107 for (i = 0; i < ivs->num_entries; i++)
3108 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3109 seq = get_insns ();
3110 end_sequence ();
3112 emit_insn_after (seq, get_insns ());
3115 /* If the backend knows where to allocate pseudos for hard
3116 register initial values, register these allocations now. */
3117 void
3118 allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
3120 #ifdef ALLOCATE_INITIAL_VALUE
3121 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3122 int i;
3124 if (ivs == 0)
3125 return;
3127 for (i = 0; i < ivs->num_entries; i++)
3129 int regno = REGNO (ivs->entries[i].pseudo);
3130 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3132 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3133 ; /* Do nothing. */
3134 else if (GET_CODE (x) == MEM)
3135 reg_equiv_memory_loc[regno] = x;
3136 else if (GET_CODE (x) == REG)
3138 reg_renumber[regno] = REGNO (x);
3139 /* Poke the regno right into regno_reg_rtx
3140 so that even fixed regs are accepted. */
3141 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3143 else abort ();
3145 #endif
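/* Example of the remapping above (hypothetical macro results): if
   ALLOCATE_INITIAL_VALUE yields the stack slot where the hard register was
   saved, the pseudo's reg_equiv_memory_loc is pointed at that MEM; if it
   yields a hard register, the pseudo is simply renumbered onto it. */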
3148 #include "gt-integrate.h"