1 /* Procedure integration for GCC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
34 #include "insn-config.h"
38 #include "integrate.h"
47 #include "langhooks.h"
49 /* Round to the next highest integer that meets the alignment. */
/* NOTE(review): valid only when ALIGN is a power of two -- the mask
   ~(ALIGN - 1) clears the low bits, e.g. CEIL_ROUND (13, 8) == 16.  */
50 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
53 /* Private type used by {get/has}_func_hard_reg_initial_val. */
54 typedef struct initial_value_pair
GTY(()) {
58 typedef struct initial_value_struct
GTY(()) {
61 initial_value_pair
* GTY ((length ("%h.num_entries"))) entries
;
62 } initial_value_struct
;
64 static void subst_constants (rtx
*, rtx
, struct inline_remap
*, int);
65 static void set_block_origin_self (tree
);
66 static void set_block_abstract_flags (tree
, int);
67 static void mark_stores (rtx
, rtx
, void *);
69 /* Returns the Ith entry in the label_map contained in MAP. If the
70 Ith entry has not yet been set, return a fresh label. This function
71 performs a lazy initialization of label_map, thereby avoiding huge memory
72 explosions when the label_map gets very large. */
75 get_label_from_map (struct inline_remap
*map
, int i
)
77 rtx x
= map
->label_map
[i
];
80 x
= map
->label_map
[i
] = gen_label_rtx ();
85 /* Return false if the function FNDECL cannot be inlined on account of its
86 attributes, true otherwise. */
88 function_attribute_inlinable_p (tree fndecl
)
90 if (targetm
.attribute_table
)
94 for (a
= DECL_ATTRIBUTES (fndecl
); a
; a
= TREE_CHAIN (a
))
96 tree name
= TREE_PURPOSE (a
);
99 for (i
= 0; targetm
.attribute_table
[i
].name
!= NULL
; i
++)
100 if (is_attribute_p (targetm
.attribute_table
[i
].name
, name
))
101 return targetm
.function_attribute_inlinable_p (fndecl
);
108 /* Copy NODE (which must be a DECL, but not a PARM_DECL). The DECL
109 originally was in the FROM_FN, but now it will be in the
113 copy_decl_for_inlining (tree decl
, tree from_fn
, tree to_fn
)
117 /* Copy the declaration. */
118 if (TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == RESULT_DECL
)
123 /* See if the frontend wants to pass this by invisible reference. */
124 if (TREE_CODE (decl
) == PARM_DECL
125 && DECL_ARG_TYPE (decl
) != TREE_TYPE (decl
)
126 && POINTER_TYPE_P (DECL_ARG_TYPE (decl
))
127 && TREE_TYPE (DECL_ARG_TYPE (decl
)) == TREE_TYPE (decl
))
130 type
= DECL_ARG_TYPE (decl
);
133 type
= TREE_TYPE (decl
);
135 /* For a parameter, we must make an equivalent VAR_DECL, not a
137 copy
= build_decl (VAR_DECL
, DECL_NAME (decl
), type
);
140 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
141 TREE_READONLY (copy
) = TREE_READONLY (decl
);
142 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
146 TREE_ADDRESSABLE (copy
) = 0;
147 TREE_READONLY (copy
) = 1;
148 TREE_THIS_VOLATILE (copy
) = 0;
153 copy
= copy_node (decl
);
154 /* The COPY is not abstract; it will be generated in TO_FN. */
155 DECL_ABSTRACT (copy
) = 0;
156 lang_hooks
.dup_lang_specific_decl (copy
);
158 /* TREE_ADDRESSABLE isn't used to indicate that a label's
159 address has been taken; it's for internal bookkeeping in
160 expand_goto_internal. */
161 if (TREE_CODE (copy
) == LABEL_DECL
)
163 TREE_ADDRESSABLE (copy
) = 0;
164 DECL_TOO_LATE (copy
) = 0;
168 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
169 declaration inspired this copy. */
170 DECL_ABSTRACT_ORIGIN (copy
) = DECL_ORIGIN (decl
);
172 /* The new variable/label has no RTL, yet. */
173 if (!TREE_STATIC (copy
) && !DECL_EXTERNAL (copy
))
174 SET_DECL_RTL (copy
, NULL_RTX
);
176 /* These args would always appear unused, if not for this. */
177 TREE_USED (copy
) = 1;
179 /* Set the context for the new declaration. */
180 if (!DECL_CONTEXT (decl
))
181 /* Globals stay global. */
183 else if (DECL_CONTEXT (decl
) != from_fn
)
184 /* Things that weren't in the scope of the function we're inlining
185 from aren't in the scope we're inlining to, either. */
187 else if (TREE_STATIC (decl
))
188 /* Function-scoped static variables should stay in the original
192 /* Ordinary automatic local variables are now in the scope of the
194 DECL_CONTEXT (copy
) = to_fn
;
199 /* Unfortunately, we need a global copy of const_equiv map for communication
200 with a function called from note_stores. Be *very* careful that this
201 is used properly in the presence of recursion. */
203 varray_type global_const_equiv_varray
;
205 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
206 except for those few rtx codes that are sharable.
208 We always return an rtx that is similar to that incoming rtx, with the
209 exception of possibly changing a REG to a SUBREG or vice versa. No
212 If FOR_LHS is nonzero, if means we are processing something that will
213 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
214 inlining since we need to be conservative in how it is set for
217 Handle constants that need to be placed in the constant pool by
218 calling `force_const_mem'. */
/* NOTE(review): copy_rtx_and_substitute -- recursively copies ORIG,
   remapping registers through MAP (see the comment block above for the
   contract).  The text below was mangled by an automated extraction:
   statements are split across lines and many lines (the return type,
   braces, `case' labels and local declarations) are missing, so this
   function cannot compile as written.  The fragments are preserved
   verbatim; restore this function from the upstream GCC integrate.c
   before attempting to build.  */
221 copy_rtx_and_substitute (rtx orig
, struct inline_remap
*map
, int for_lhs
)
226 enum machine_mode mode
;
227 const char *format_ptr
;
233 code
= GET_CODE (orig
);
234 mode
= GET_MODE (orig
);
239 /* If the stack pointer register shows up, it must be part of
240 stack-adjustments (*not* because we eliminated the frame pointer!).
241 Small hard registers are returned as-is. Pseudo-registers
242 go through their `reg_map'. */
243 regno
= REGNO (orig
);
244 if (regno
<= LAST_VIRTUAL_REGISTER
)
246 /* Some hard registers are also mapped,
247 but others are not translated. */
248 if (map
->reg_map
[regno
] != 0)
249 return map
->reg_map
[regno
];
251 /* If this is the virtual frame pointer, make space in current
252 function's stack frame for the stack frame of the inline function.
254 Copy the address of this area into a pseudo. Map
255 virtual_stack_vars_rtx to this pseudo and set up a constant
256 equivalence for it to be the address. This will substitute the
257 address into insns where it can be substituted and use the new
258 pseudo where it can't. */
259 else if (regno
== VIRTUAL_STACK_VARS_REGNUM
)
263 = get_func_frame_size (DECL_STRUCT_FUNCTION (map
->fndecl
));
264 #ifdef FRAME_GROWS_DOWNWARD
266 = (DECL_STRUCT_FUNCTION (map
->fndecl
)->stack_alignment_needed
269 /* In this case, virtual_stack_vars_rtx points to one byte
270 higher than the top of the frame area. So make sure we
271 allocate a big enough chunk to keep the frame pointer
272 aligned like a real one. */
274 size
= CEIL_ROUND (size
, alignment
);
277 loc
= assign_stack_temp (BLKmode
, size
, 1);
279 #ifdef FRAME_GROWS_DOWNWARD
280 /* In this case, virtual_stack_vars_rtx points to one byte
281 higher than the top of the frame area. So compute the offset
282 to one byte higher than our substitute frame. */
283 loc
= plus_constant (loc
, size
);
285 map
->reg_map
[regno
] = temp
286 = force_reg (Pmode
, force_operand (loc
, NULL_RTX
));
288 #ifdef STACK_BOUNDARY
289 mark_reg_pointer (map
->reg_map
[regno
], STACK_BOUNDARY
);
292 SET_CONST_EQUIV_DATA (map
, temp
, loc
, CONST_AGE_PARM
);
296 emit_insn_after (seq
, map
->insns_at_start
);
299 else if (regno
== VIRTUAL_INCOMING_ARGS_REGNUM
)
301 /* Do the same for a block to contain any arguments referenced
304 int size
= DECL_STRUCT_FUNCTION (map
->fndecl
)->args_size
;
307 loc
= assign_stack_temp (BLKmode
, size
, 1);
309 /* When arguments grow downward, the virtual incoming
310 args pointer points to the top of the argument block,
311 so the remapped location better do the same. */
312 #ifdef ARGS_GROW_DOWNWARD
313 loc
= plus_constant (loc
, size
);
315 map
->reg_map
[regno
] = temp
316 = force_reg (Pmode
, force_operand (loc
, NULL_RTX
));
318 #ifdef STACK_BOUNDARY
319 mark_reg_pointer (map
->reg_map
[regno
], STACK_BOUNDARY
);
322 SET_CONST_EQUIV_DATA (map
, temp
, loc
, CONST_AGE_PARM
);
326 emit_insn_after (seq
, map
->insns_at_start
);
329 else if (REG_FUNCTION_VALUE_P (orig
))
331 if (rtx_equal_function_value_matters
)
332 /* This is an ignored return value. We must not
333 leave it in with REG_FUNCTION_VALUE_P set, since
334 that would confuse subsequent inlining of the
335 current function into a later function. */
336 return gen_rtx_REG (GET_MODE (orig
), regno
);
338 /* Must be unrolling loops or replicating code if we
339 reach here, so return the register unchanged. */
347 if (map
->reg_map
[regno
] == NULL
)
349 map
->reg_map
[regno
] = gen_reg_rtx (mode
);
350 REG_USERVAR_P (map
->reg_map
[regno
]) = REG_USERVAR_P (orig
);
351 REG_LOOP_TEST_P (map
->reg_map
[regno
]) = REG_LOOP_TEST_P (orig
);
352 RTX_UNCHANGING_P (map
->reg_map
[regno
]) = RTX_UNCHANGING_P (orig
);
353 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
355 if (REG_POINTER (map
->x_regno_reg_rtx
[regno
]))
356 mark_reg_pointer (map
->reg_map
[regno
],
357 map
->regno_pointer_align
[regno
]);
359 return map
->reg_map
[regno
];
362 copy
= copy_rtx_and_substitute (SUBREG_REG (orig
), map
, for_lhs
);
363 return simplify_gen_subreg (GET_MODE (orig
), copy
,
364 GET_MODE (SUBREG_REG (orig
)),
368 copy
= gen_rtx_ADDRESSOF (mode
,
369 copy_rtx_and_substitute (XEXP (orig
, 0),
371 0, ADDRESSOF_DECL (orig
));
372 regno
= ADDRESSOF_REGNO (orig
);
373 if (map
->reg_map
[regno
])
374 regno
= REGNO (map
->reg_map
[regno
]);
375 else if (regno
> LAST_VIRTUAL_REGISTER
)
377 temp
= XEXP (orig
, 0);
378 map
->reg_map
[regno
] = gen_reg_rtx (GET_MODE (temp
));
379 REG_USERVAR_P (map
->reg_map
[regno
]) = REG_USERVAR_P (temp
);
380 REG_LOOP_TEST_P (map
->reg_map
[regno
]) = REG_LOOP_TEST_P (temp
);
381 RTX_UNCHANGING_P (map
->reg_map
[regno
]) = RTX_UNCHANGING_P (temp
);
382 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
384 /* Objects may initially be represented as registers, but
385 but turned into a MEM if their address is taken by
386 put_var_into_stack. Therefore, the register table may have
387 entries which are MEMs.
389 We briefly tried to clear such entries, but that ended up
390 cascading into many changes due to the optimizers not being
391 prepared for empty entries in the register table. So we've
392 decided to allow the MEMs in the register table for now. */
393 if (REG_P (map
->x_regno_reg_rtx
[regno
])
394 && REG_POINTER (map
->x_regno_reg_rtx
[regno
]))
395 mark_reg_pointer (map
->reg_map
[regno
],
396 map
->regno_pointer_align
[regno
]);
397 regno
= REGNO (map
->reg_map
[regno
]);
399 ADDRESSOF_REGNO (copy
) = regno
;
404 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
405 to (use foo) if the original insn didn't have a subreg.
406 Removing the subreg distorts the VAX movstrhi pattern
407 by changing the mode of an operand. */
408 copy
= copy_rtx_and_substitute (XEXP (orig
, 0), map
, code
== CLOBBER
);
409 if (GET_CODE (copy
) == SUBREG
&& GET_CODE (XEXP (orig
, 0)) != SUBREG
)
410 copy
= SUBREG_REG (copy
);
411 return gen_rtx_fmt_e (code
, VOIDmode
, copy
);
413 /* We need to handle "deleted" labels that appear in the DECL_RTL
416 if (NOTE_LINE_NUMBER (orig
) != NOTE_INSN_DELETED_LABEL
)
421 LABEL_PRESERVE_P (get_label_from_map (map
, CODE_LABEL_NUMBER (orig
)))
422 = LABEL_PRESERVE_P (orig
);
423 return get_label_from_map (map
, CODE_LABEL_NUMBER (orig
));
429 LABEL_REF_NONLOCAL_P (orig
) ? XEXP (orig
, 0)
430 : get_label_from_map (map
, CODE_LABEL_NUMBER (XEXP (orig
, 0))));
432 LABEL_OUTSIDE_LOOP_P (copy
) = LABEL_OUTSIDE_LOOP_P (orig
);
434 /* The fact that this label was previously nonlocal does not mean
435 it still is, so we must check if it is within the range of
436 this function's labels. */
437 LABEL_REF_NONLOCAL_P (copy
)
438 = (LABEL_REF_NONLOCAL_P (orig
)
439 && ! (CODE_LABEL_NUMBER (XEXP (copy
, 0)) >= get_first_label_num ()
440 && CODE_LABEL_NUMBER (XEXP (copy
, 0)) < max_label_num ()));
451 /* Symbols which represent the address of a label stored in the constant
452 pool must be modified to point to a constant pool entry for the
453 remapped label. Otherwise, symbols are returned unchanged. */
454 if (CONSTANT_POOL_ADDRESS_P (orig
))
456 struct function
*f
= cfun
;
457 rtx constant
= get_pool_constant_for_function (f
, orig
);
458 if (GET_CODE (constant
) == LABEL_REF
)
459 return XEXP (force_const_mem
461 copy_rtx_and_substitute (constant
, map
, for_lhs
)),
467 /* We have to make a new copy of this CONST_DOUBLE because don't want
468 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
469 duplicate of a CONST_DOUBLE we have already seen. */
470 if (GET_MODE_CLASS (GET_MODE (orig
)) == MODE_FLOAT
)
474 REAL_VALUE_FROM_CONST_DOUBLE (d
, orig
);
475 return CONST_DOUBLE_FROM_REAL_VALUE (d
, GET_MODE (orig
));
478 return immed_double_const (CONST_DOUBLE_LOW (orig
),
479 CONST_DOUBLE_HIGH (orig
), VOIDmode
);
485 /* If a single asm insn contains multiple output operands then
486 it contains multiple ASM_OPERANDS rtx's that share the input
487 and constraint vecs. We must make sure that the copied insn
488 continues to share it. */
489 if (map
->orig_asm_operands_vector
== ASM_OPERANDS_INPUT_VEC (orig
))
491 copy
= rtx_alloc (ASM_OPERANDS
);
492 RTX_FLAG (copy
, volatil
) = RTX_FLAG (orig
, volatil
);
493 PUT_MODE (copy
, GET_MODE (orig
));
494 ASM_OPERANDS_TEMPLATE (copy
) = ASM_OPERANDS_TEMPLATE (orig
);
495 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy
)
496 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig
);
497 ASM_OPERANDS_OUTPUT_IDX (copy
) = ASM_OPERANDS_OUTPUT_IDX (orig
);
498 ASM_OPERANDS_INPUT_VEC (copy
) = map
->copy_asm_operands_vector
;
499 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy
)
500 = map
->copy_asm_constraints_vector
;
501 #ifdef USE_MAPPED_LOCATION
502 ASM_OPERANDS_SOURCE_LOCATION (copy
)
503 = ASM_OPERANDS_SOURCE_LOCATION (orig
);
505 ASM_OPERANDS_SOURCE_FILE (copy
) = ASM_OPERANDS_SOURCE_FILE (orig
);
506 ASM_OPERANDS_SOURCE_LINE (copy
) = ASM_OPERANDS_SOURCE_LINE (orig
);
513 /* This is given special treatment because the first
514 operand of a CALL is a (MEM ...) which may get
515 forced into a register for cse. This is undesirable
516 if function-address cse isn't wanted or if we won't do cse. */
517 #ifndef NO_FUNCTION_CSE
518 if (! (optimize
&& ! flag_no_function_cse
))
522 = gen_rtx_MEM (GET_MODE (XEXP (orig
, 0)),
523 copy_rtx_and_substitute (XEXP (XEXP (orig
, 0), 0),
526 MEM_COPY_ATTRIBUTES (copy
, XEXP (orig
, 0));
529 gen_rtx_CALL (GET_MODE (orig
), copy
,
530 copy_rtx_and_substitute (XEXP (orig
, 1), map
, 0));
535 /* Must be ifdefed out for loop unrolling to work. */
536 /* ??? Is this for the old or the new unroller? */
542 /* If this is setting fp or ap, it means that we have a nonlocal goto.
543 Adjust the setting by the offset of the area we made.
544 If the nonlocal goto is into the current function,
545 this will result in unnecessarily bad code, but should work. */
546 if (SET_DEST (orig
) == virtual_stack_vars_rtx
547 || SET_DEST (orig
) == virtual_incoming_args_rtx
)
549 /* In case a translation hasn't occurred already, make one now. */
552 HOST_WIDE_INT loc_offset
;
554 copy_rtx_and_substitute (SET_DEST (orig
), map
, for_lhs
);
555 equiv_reg
= map
->reg_map
[REGNO (SET_DEST (orig
))];
556 equiv_loc
= VARRAY_CONST_EQUIV (map
->const_equiv_varray
,
557 REGNO (equiv_reg
)).rtx
;
559 = REG_P (equiv_loc
) ? 0 : INTVAL (XEXP (equiv_loc
, 1));
561 return gen_rtx_SET (VOIDmode
, SET_DEST (orig
),
564 (copy_rtx_and_substitute (SET_SRC (orig
),
570 return gen_rtx_SET (VOIDmode
,
571 copy_rtx_and_substitute (SET_DEST (orig
), map
, 1),
572 copy_rtx_and_substitute (SET_SRC (orig
), map
, 0));
576 copy
= gen_rtx_MEM (mode
, copy_rtx_and_substitute (XEXP (orig
, 0),
578 MEM_COPY_ATTRIBUTES (copy
, orig
);
585 copy
= rtx_alloc (code
);
586 PUT_MODE (copy
, mode
);
587 RTX_FLAG (copy
, in_struct
) = RTX_FLAG (orig
, in_struct
);
588 RTX_FLAG (copy
, volatil
) = RTX_FLAG (orig
, volatil
);
589 RTX_FLAG (copy
, unchanging
) = RTX_FLAG (orig
, unchanging
);
591 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
593 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
595 switch (*format_ptr
++)
598 X0ANY (copy
, i
) = X0ANY (orig
, i
);
603 = copy_rtx_and_substitute (XEXP (orig
, i
), map
, for_lhs
);
607 /* Change any references to old-insns to point to the
608 corresponding copied insns. */
609 XEXP (copy
, i
) = map
->insn_map
[INSN_UID (XEXP (orig
, i
))];
613 XVEC (copy
, i
) = XVEC (orig
, i
);
614 if (XVEC (orig
, i
) != NULL
&& XVECLEN (orig
, i
) != 0)
616 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
617 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
619 = copy_rtx_and_substitute (XVECEXP (orig
, i
, j
),
625 XWINT (copy
, i
) = XWINT (orig
, i
);
629 XINT (copy
, i
) = XINT (orig
, i
);
633 XSTR (copy
, i
) = XSTR (orig
, i
);
637 XTREE (copy
, i
) = XTREE (orig
, i
);
645 if (code
== ASM_OPERANDS
&& map
->orig_asm_operands_vector
== 0)
647 map
->orig_asm_operands_vector
= ASM_OPERANDS_INPUT_VEC (orig
);
648 map
->copy_asm_operands_vector
= ASM_OPERANDS_INPUT_VEC (copy
);
649 map
->copy_asm_constraints_vector
650 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy
);
656 /* Substitute known constant values into INSN, if that is valid. */
659 try_constants (rtx insn
, struct inline_remap
*map
)
665 /* First try just updating addresses, then other things. This is
666 important when we have something like the store of a constant
667 into memory and we can update the memory address but the machine
668 does not support a constant source. */
669 subst_constants (&PATTERN (insn
), insn
, map
, 1);
670 apply_change_group ();
671 subst_constants (&PATTERN (insn
), insn
, map
, 0);
672 apply_change_group ();
674 /* Enforce consistency between the addresses in the regular insn flow
675 and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
676 if (GET_CODE (insn
) == CALL_INSN
&& CALL_INSN_FUNCTION_USAGE (insn
))
678 subst_constants (&CALL_INSN_FUNCTION_USAGE (insn
), insn
, map
, 1);
679 apply_change_group ();
682 /* Show we don't know the value of anything stored or clobbered. */
683 note_stores (PATTERN (insn
), mark_stores
, NULL
);
684 map
->last_pc_value
= 0;
686 map
->last_cc0_value
= 0;
689 /* Set up any constant equivalences made in this insn. */
690 for (i
= 0; i
< map
->num_sets
; i
++)
692 if (REG_P (map
->equiv_sets
[i
].dest
))
694 int regno
= REGNO (map
->equiv_sets
[i
].dest
);
696 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map
, regno
);
697 if (VARRAY_CONST_EQUIV (map
->const_equiv_varray
, regno
).rtx
== 0
698 /* Following clause is a hack to make case work where GNU C++
699 reassigns a variable to make cse work right. */
700 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map
->const_equiv_varray
,
702 map
->equiv_sets
[i
].equiv
))
703 SET_CONST_EQUIV_DATA (map
, map
->equiv_sets
[i
].dest
,
704 map
->equiv_sets
[i
].equiv
, map
->const_age
);
706 else if (map
->equiv_sets
[i
].dest
== pc_rtx
)
707 map
->last_pc_value
= map
->equiv_sets
[i
].equiv
;
709 else if (map
->equiv_sets
[i
].dest
== cc0_rtx
)
710 map
->last_cc0_value
= map
->equiv_sets
[i
].equiv
;
715 /* Substitute known constants for pseudo regs in the contents of LOC,
716 which are part of INSN.
717 If INSN is zero, the substitution should always be done (this is used to
719 These changes are taken out by try_constants if the result is not valid.
721 Note that we are more concerned with determining when the result of a SET
722 is a constant, for further propagation, than actually inserting constants
723 into insns; cse will do the latter task better.
725 This function is also used to adjust address of items previously addressed
726 via the virtual stack variable or virtual incoming arguments registers.
728 If MEMONLY is nonzero, only make changes inside a MEM. */
/* NOTE(review): subst_constants -- recursively substitutes known constant
   equivalents (from MAP's const_equiv_varray) into *LOC via
   validate_change, then simplifies the result; see the comment block
   above for the contract.  The text below was mangled by an automated
   extraction: statements are split across lines and many lines (the
   return type, braces, `case' labels and local declarations) are
   missing, so this function cannot compile as written.  The fragments
   are preserved verbatim; restore this function from the upstream GCC
   integrate.c before attempting to build.  */
731 subst_constants (rtx
*loc
, rtx insn
, struct inline_remap
*map
, int memonly
)
736 const char *format_ptr
;
737 int num_changes
= num_validated_changes ();
739 enum machine_mode op0_mode
= MAX_MACHINE_MODE
;
758 validate_change (insn
, loc
, map
->last_cc0_value
, 1);
764 /* The only thing we can do with a USE or CLOBBER is possibly do
765 some substitutions in a MEM within it. */
766 if (MEM_P (XEXP (x
, 0)))
767 subst_constants (&XEXP (XEXP (x
, 0), 0), insn
, map
, 0);
771 /* Substitute for parms and known constants. Don't replace
772 hard regs used as user variables with constants. */
775 int regno
= REGNO (x
);
776 struct const_equiv_data
*p
;
778 if (! (regno
< FIRST_PSEUDO_REGISTER
&& REG_USERVAR_P (x
))
779 && (size_t) regno
< VARRAY_SIZE (map
->const_equiv_varray
)
780 && (p
= &VARRAY_CONST_EQUIV (map
->const_equiv_varray
, regno
),
782 && p
->age
>= map
->const_age
)
783 validate_change (insn
, loc
, p
->rtx
, 1);
788 /* SUBREG applied to something other than a reg
789 should be treated as ordinary, since that must
790 be a special hack and we don't know how to treat it specially.
791 Consider for example mulsidi3 in m68k.md.
792 Ordinary SUBREG of a REG needs this special treatment. */
793 if (! memonly
&& REG_P (SUBREG_REG (x
)))
795 rtx inner
= SUBREG_REG (x
);
798 /* We can't call subst_constants on &SUBREG_REG (x) because any
799 constant or SUBREG wouldn't be valid inside our SUBEG. Instead,
800 see what is inside, try to form the new SUBREG and see if that is
801 valid. We handle two cases: extracting a full word in an
802 integral mode and extracting the low part. */
803 subst_constants (&inner
, NULL_RTX
, map
, 0);
804 new = simplify_gen_subreg (GET_MODE (x
), inner
,
805 GET_MODE (SUBREG_REG (x
)),
809 validate_change (insn
, loc
, new, 1);
811 cancel_changes (num_changes
);
818 subst_constants (&XEXP (x
, 0), insn
, map
, 0);
820 /* If a memory address got spoiled, change it back. */
821 if (! memonly
&& insn
!= 0 && num_validated_changes () != num_changes
822 && ! memory_address_p (GET_MODE (x
), XEXP (x
, 0)))
823 cancel_changes (num_changes
);
828 /* Substitute constants in our source, and in any arguments to a
829 complex (e..g, ZERO_EXTRACT) destination, but not in the destination
831 rtx
*dest_loc
= &SET_DEST (x
);
832 rtx dest
= *dest_loc
;
834 enum machine_mode compare_mode
= VOIDmode
;
836 /* If SET_SRC is a COMPARE which subst_constants would turn into
837 COMPARE of 2 VOIDmode constants, note the mode in which comparison
839 if (GET_CODE (SET_SRC (x
)) == COMPARE
)
842 if (GET_MODE_CLASS (GET_MODE (src
)) == MODE_CC
845 compare_mode
= GET_MODE (XEXP (src
, 0));
846 if (compare_mode
== VOIDmode
)
847 compare_mode
= GET_MODE (XEXP (src
, 1));
851 subst_constants (&SET_SRC (x
), insn
, map
, memonly
);
854 while (GET_CODE (*dest_loc
) == ZERO_EXTRACT
855 || GET_CODE (*dest_loc
) == SUBREG
856 || GET_CODE (*dest_loc
) == STRICT_LOW_PART
)
858 if (GET_CODE (*dest_loc
) == ZERO_EXTRACT
)
860 subst_constants (&XEXP (*dest_loc
, 1), insn
, map
, memonly
);
861 subst_constants (&XEXP (*dest_loc
, 2), insn
, map
, memonly
);
863 dest_loc
= &XEXP (*dest_loc
, 0);
866 /* Do substitute in the address of a destination in memory. */
867 if (MEM_P (*dest_loc
))
868 subst_constants (&XEXP (*dest_loc
, 0), insn
, map
, 0);
870 /* Check for the case of DEST a SUBREG, both it and the underlying
871 register are less than one word, and the SUBREG has the wider mode.
872 In the case, we are really setting the underlying register to the
873 source converted to the mode of DEST. So indicate that. */
874 if (GET_CODE (dest
) == SUBREG
875 && GET_MODE_SIZE (GET_MODE (dest
)) <= UNITS_PER_WORD
876 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))) <= UNITS_PER_WORD
877 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
878 <= GET_MODE_SIZE (GET_MODE (dest
)))
879 && (tem
= gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest
)),
881 src
= tem
, dest
= SUBREG_REG (dest
);
883 /* If storing a recognizable value save it for later recording. */
884 if ((map
->num_sets
< MAX_RECOG_OPERANDS
)
887 && (REGNO (src
) == VIRTUAL_INCOMING_ARGS_REGNUM
888 || REGNO (src
) == VIRTUAL_STACK_VARS_REGNUM
))
889 || (GET_CODE (src
) == PLUS
890 && REG_P (XEXP (src
, 0))
891 && (REGNO (XEXP (src
, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
892 || REGNO (XEXP (src
, 0)) == VIRTUAL_STACK_VARS_REGNUM
)
893 && CONSTANT_P (XEXP (src
, 1)))
894 || GET_CODE (src
) == COMPARE
897 && (src
== pc_rtx
|| GET_CODE (src
) == RETURN
898 || GET_CODE (src
) == LABEL_REF
))))
900 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
901 it will cause us to save the COMPARE with any constants
902 substituted, which is what we want for later. */
903 rtx src_copy
= copy_rtx (src
);
904 map
->equiv_sets
[map
->num_sets
].equiv
= src_copy
;
905 map
->equiv_sets
[map
->num_sets
++].dest
= dest
;
906 if (compare_mode
!= VOIDmode
907 && GET_CODE (src
) == COMPARE
908 && (GET_MODE_CLASS (GET_MODE (src
)) == MODE_CC
910 && GET_MODE (XEXP (src
, 0)) == VOIDmode
911 && GET_MODE (XEXP (src
, 1)) == VOIDmode
)
913 map
->compare_src
= src_copy
;
914 map
->compare_mode
= compare_mode
;
924 format_ptr
= GET_RTX_FORMAT (code
);
926 /* If the first operand is an expression, save its mode for later. */
927 if (*format_ptr
== 'e')
928 op0_mode
= GET_MODE (XEXP (x
, 0));
930 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
932 switch (*format_ptr
++)
939 subst_constants (&XEXP (x
, i
), insn
, map
, memonly
);
952 if (XVEC (x
, i
) != NULL
&& XVECLEN (x
, i
) != 0)
953 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
954 subst_constants (&XVECEXP (x
, i
, j
), insn
, map
, memonly
);
963 /* If this is a commutative operation, move a constant to the second
964 operand unless the second operand is already a CONST_INT. */
966 && (GET_RTX_CLASS (code
) == RTX_COMM_ARITH
967 || GET_RTX_CLASS (code
) == RTX_COMM_COMPARE
)
968 && CONSTANT_P (XEXP (x
, 0)) && GET_CODE (XEXP (x
, 1)) != CONST_INT
)
970 rtx tem
= XEXP (x
, 0);
971 validate_change (insn
, &XEXP (x
, 0), XEXP (x
, 1), 1);
972 validate_change (insn
, &XEXP (x
, 1), tem
, 1);
975 /* Simplify the expression in case we put in some constants. */
977 switch (GET_RTX_CLASS (code
))
980 if (op0_mode
== MAX_MACHINE_MODE
)
982 new = simplify_unary_operation (code
, GET_MODE (x
),
983 XEXP (x
, 0), op0_mode
);
987 case RTX_COMM_COMPARE
:
989 enum machine_mode op_mode
= GET_MODE (XEXP (x
, 0));
991 if (op_mode
== VOIDmode
)
992 op_mode
= GET_MODE (XEXP (x
, 1));
994 new = simplify_relational_operation (code
, GET_MODE (x
), op_mode
,
995 XEXP (x
, 0), XEXP (x
, 1));
1000 case RTX_COMM_ARITH
:
1001 new = simplify_binary_operation (code
, GET_MODE (x
),
1002 XEXP (x
, 0), XEXP (x
, 1));
1005 case RTX_BITFIELD_OPS
:
1007 if (op0_mode
== MAX_MACHINE_MODE
)
1010 if (code
== IF_THEN_ELSE
)
1012 rtx op0
= XEXP (x
, 0);
1014 if (COMPARISON_P (op0
)
1015 && GET_MODE (op0
) == VOIDmode
1016 && ! side_effects_p (op0
)
1017 && XEXP (op0
, 0) == map
->compare_src
1018 && GET_MODE (XEXP (op0
, 1)) == VOIDmode
)
1020 /* We have compare of two VOIDmode constants for which
1021 we recorded the comparison mode. */
1023 simplify_gen_relational (GET_CODE (op0
), GET_MODE (op0
),
1024 map
->compare_mode
, XEXP (op0
, 0),
1027 if (GET_CODE (tem
) != CONST_INT
)
1028 new = simplify_ternary_operation (code
, GET_MODE (x
),
1029 op0_mode
, tem
, XEXP (x
, 1),
1031 else if (tem
== const0_rtx
)
1038 new = simplify_ternary_operation (code
, GET_MODE (x
), op0_mode
,
1039 XEXP (x
, 0), XEXP (x
, 1),
1048 validate_change (insn
, loc
, new, 1);
1051 /* Show that register modified no longer contain known constants. We are
1052 called from note_stores with parts of the new insn. */
1055 mark_stores (rtx dest
, rtx x ATTRIBUTE_UNUSED
, void *data ATTRIBUTE_UNUSED
)
1058 enum machine_mode mode
= VOIDmode
;
1060 /* DEST is always the innermost thing set, except in the case of
1061 SUBREGs of hard registers. */
1064 regno
= REGNO (dest
), mode
= GET_MODE (dest
);
1065 else if (GET_CODE (dest
) == SUBREG
&& REG_P (SUBREG_REG (dest
)))
1067 regno
= REGNO (SUBREG_REG (dest
));
1068 if (regno
< FIRST_PSEUDO_REGISTER
)
1069 regno
+= subreg_regno_offset (REGNO (SUBREG_REG (dest
)),
1070 GET_MODE (SUBREG_REG (dest
)),
1073 mode
= GET_MODE (SUBREG_REG (dest
));
1078 unsigned int uregno
= regno
;
1079 unsigned int last_reg
= (uregno
>= FIRST_PSEUDO_REGISTER
? uregno
1080 : uregno
+ hard_regno_nregs
[uregno
][mode
] - 1);
1083 /* Ignore virtual stack var or virtual arg register since those
1084 are handled separately. */
1085 if (uregno
!= VIRTUAL_INCOMING_ARGS_REGNUM
1086 && uregno
!= VIRTUAL_STACK_VARS_REGNUM
)
1087 for (i
= uregno
; i
<= last_reg
; i
++)
1088 if ((size_t) i
< VARRAY_SIZE (global_const_equiv_varray
))
1089 VARRAY_CONST_EQUIV (global_const_equiv_varray
, i
).rtx
= 0;
1093 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
1094 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
1095 that it points to the node itself, thus indicating that the node is its
1096 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
1097 the given node is NULL, recursively descend the decl/block tree which
1098 it is the root of, and for each other ..._DECL or BLOCK node contained
1099 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
1100 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
1101 values to point to themselves. */
1104 set_block_origin_self (tree stmt
)
1106 if (BLOCK_ABSTRACT_ORIGIN (stmt
) == NULL_TREE
)
1108 BLOCK_ABSTRACT_ORIGIN (stmt
) = stmt
;
1113 for (local_decl
= BLOCK_VARS (stmt
);
1114 local_decl
!= NULL_TREE
;
1115 local_decl
= TREE_CHAIN (local_decl
))
1116 set_decl_origin_self (local_decl
); /* Potential recursion. */
1122 for (subblock
= BLOCK_SUBBLOCKS (stmt
);
1123 subblock
!= NULL_TREE
;
1124 subblock
= BLOCK_CHAIN (subblock
))
1125 set_block_origin_self (subblock
); /* Recurse. */
1130 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
1131 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
1132 node to so that it points to the node itself, thus indicating that the
1133 node represents its own (abstract) origin. Additionally, if the
1134 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
1135 the decl/block tree of which the given node is the root of, and for
1136 each other ..._DECL or BLOCK node contained therein whose
1137 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
1138 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
1139 point to themselves. */
1142 set_decl_origin_self (tree decl
)
1144 if (DECL_ABSTRACT_ORIGIN (decl
) == NULL_TREE
)
1146 DECL_ABSTRACT_ORIGIN (decl
) = decl
;
1147 if (TREE_CODE (decl
) == FUNCTION_DECL
)
1151 for (arg
= DECL_ARGUMENTS (decl
); arg
; arg
= TREE_CHAIN (arg
))
1152 DECL_ABSTRACT_ORIGIN (arg
) = arg
;
1153 if (DECL_INITIAL (decl
) != NULL_TREE
1154 && DECL_INITIAL (decl
) != error_mark_node
)
1155 set_block_origin_self (DECL_INITIAL (decl
));
1160 /* Given a pointer to some BLOCK node, and a boolean value to set the
1161 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
1162 the given block, and for all local decls and all local sub-blocks
1163 (recursively) which are contained therein. */
1166 set_block_abstract_flags (tree stmt
, int setting
)
1171 BLOCK_ABSTRACT (stmt
) = setting
;
1173 for (local_decl
= BLOCK_VARS (stmt
);
1174 local_decl
!= NULL_TREE
;
1175 local_decl
= TREE_CHAIN (local_decl
))
1176 set_decl_abstract_flags (local_decl
, setting
);
1178 for (subblock
= BLOCK_SUBBLOCKS (stmt
);
1179 subblock
!= NULL_TREE
;
1180 subblock
= BLOCK_CHAIN (subblock
))
1181 set_block_abstract_flags (subblock
, setting
);
1184 /* Given a pointer to some ..._DECL node, and a boolean value to set the
1185 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
1186 given decl, and (in the case where the decl is a FUNCTION_DECL) also
1187 set the abstract flags for all of the parameters, local vars, local
1188 blocks and sub-blocks (recursively) to the same setting. */
1191 set_decl_abstract_flags (tree decl
, int setting
)
1193 DECL_ABSTRACT (decl
) = setting
;
1194 if (TREE_CODE (decl
) == FUNCTION_DECL
)
1198 for (arg
= DECL_ARGUMENTS (decl
); arg
; arg
= TREE_CHAIN (arg
))
1199 DECL_ABSTRACT (arg
) = setting
;
1200 if (DECL_INITIAL (decl
) != NULL_TREE
1201 && DECL_INITIAL (decl
) != error_mark_node
)
1202 set_block_abstract_flags (DECL_INITIAL (decl
), setting
);
1206 /* Functions to keep track of the values hard regs had at the start of
1210 get_hard_reg_initial_reg (struct function
*fun
, rtx reg
)
1212 struct initial_value_struct
*ivs
= fun
->hard_reg_initial_vals
;
1218 for (i
= 0; i
< ivs
->num_entries
; i
++)
1219 if (rtx_equal_p (ivs
->entries
[i
].pseudo
, reg
))
1220 return ivs
->entries
[i
].hard_reg
;
1226 has_func_hard_reg_initial_val (struct function
*fun
, rtx reg
)
1228 struct initial_value_struct
*ivs
= fun
->hard_reg_initial_vals
;
1234 for (i
= 0; i
< ivs
->num_entries
; i
++)
1235 if (rtx_equal_p (ivs
->entries
[i
].hard_reg
, reg
))
1236 return ivs
->entries
[i
].pseudo
;
1242 get_func_hard_reg_initial_val (struct function
*fun
, rtx reg
)
1244 struct initial_value_struct
*ivs
= fun
->hard_reg_initial_vals
;
1245 rtx rv
= has_func_hard_reg_initial_val (fun
, reg
);
1252 fun
->hard_reg_initial_vals
= ggc_alloc (sizeof (initial_value_struct
));
1253 ivs
= fun
->hard_reg_initial_vals
;
1254 ivs
->num_entries
= 0;
1255 ivs
->max_entries
= 5;
1256 ivs
->entries
= ggc_alloc (5 * sizeof (initial_value_pair
));
1259 if (ivs
->num_entries
>= ivs
->max_entries
)
1261 ivs
->max_entries
+= 5;
1262 ivs
->entries
= ggc_realloc (ivs
->entries
,
1264 * sizeof (initial_value_pair
));
1267 ivs
->entries
[ivs
->num_entries
].hard_reg
= reg
;
1268 ivs
->entries
[ivs
->num_entries
].pseudo
= gen_reg_rtx (GET_MODE (reg
));
1270 return ivs
->entries
[ivs
->num_entries
++].pseudo
;
1274 get_hard_reg_initial_val (enum machine_mode mode
, int regno
)
1276 return get_func_hard_reg_initial_val (cfun
, gen_rtx_REG (mode
, regno
));
1280 has_hard_reg_initial_val (enum machine_mode mode
, int regno
)
1282 return has_func_hard_reg_initial_val (cfun
, gen_rtx_REG (mode
, regno
));
1286 emit_initial_value_sets (void)
1288 struct initial_value_struct
*ivs
= cfun
->hard_reg_initial_vals
;
1296 for (i
= 0; i
< ivs
->num_entries
; i
++)
1297 emit_move_insn (ivs
->entries
[i
].pseudo
, ivs
->entries
[i
].hard_reg
);
1301 emit_insn_after (seq
, entry_of_function ());
1304 /* If the backend knows where to allocate pseudos for hard
1305 register initial values, register these allocations now. */
1307 allocate_initial_values (rtx
*reg_equiv_memory_loc ATTRIBUTE_UNUSED
)
1309 #ifdef ALLOCATE_INITIAL_VALUE
1310 struct initial_value_struct
*ivs
= cfun
->hard_reg_initial_vals
;
1316 for (i
= 0; i
< ivs
->num_entries
; i
++)
1318 int regno
= REGNO (ivs
->entries
[i
].pseudo
);
1319 rtx x
= ALLOCATE_INITIAL_VALUE (ivs
->entries
[i
].hard_reg
);
1321 if (x
== NULL_RTX
|| REG_N_SETS (REGNO (ivs
->entries
[i
].pseudo
)) > 1)
1324 reg_equiv_memory_loc
[regno
] = x
;
1327 reg_renumber
[regno
] = REGNO (x
);
1328 /* Poke the regno right into regno_reg_rtx
1329 so that even fixed regs are accepted. */
1330 REGNO (ivs
->entries
[i
].pseudo
) = REGNO (x
);
1337 #include "gt-integrate.h"