/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"
/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
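
/* As a worked example of the macro above: with an 8-byte alignment,
   CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16, and CEIL_ROUND (16, 8)
   stays 16.  Note that the mask arithmetic assumes ALIGN is a power
   of two.  */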

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
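
/* As an illustrative sketch (hypothetical caller, not code from this
   file): a pass remapping a jump target while copying insns would do

     rtx new_label = get_label_from_map (map, CODE_LABEL_NUMBER (old_label));

   Because allocation is lazy, label_map slots for labels that are
   never referenced stay NULL and never get a label rtx.  */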

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}

/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type = TREE_TYPE (decl);

      /* For a parameter or result, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	{
	  TREE_ADDRESSABLE (copy) = 0;
	}
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
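
/* For example (a hypothetical sketch, not code from this file): when
   inlining `int square (int x)', the PARM_DECL for `x' is copied via

     tree x_copy = copy_decl_for_inlining (x_parm, square_decl, caller_decl);

   and x_copy comes back as a VAR_DECL in the caller, so the inlined
   body reads an ordinary local variable rather than a parameter.  */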

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size
		= get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      if (rtx_equal_function_value_matters)
		/* This is an ignored return value.  We must not
		   leave it in with REG_FUNCTION_VALUE_P set, since
		   that would confuse subsequent inlining of the
		   current function into a later function.  */
		return gen_rtx_REG (GET_MODE (orig), regno);
	      else
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	    }
	  else
	    return orig;

	  /* Every branch above returns, so this point is unreachable.  */
	  abort ();
	}

      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
				  GET_MODE (SUBREG_REG (orig)),
				  SUBREG_BYTE (orig));

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movmemhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
	break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
	  ASM_OPERANDS_SOURCE_LOCATION (copy)
	    = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	{
	  rtx copy
	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map, 0));

	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

	  return
	    gen_rtx_CALL (GET_MODE (orig), copy,
			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
	}
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
							 map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
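
/* A rough illustration of the function above, with hypothetical pseudo
   register numbers: copying

     (set (reg:SI 60) (plus:SI (reg:SI 61) (const_int 4)))

   goes through the SET case, which copies the destination with
   FOR_LHS == 1 and the source with FOR_LHS == 0.  Pseudos 60 and 61
   come back as fresh pseudos recorded in map->reg_map, while the
   sharable (const_int 4) is returned unchanged.  */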

/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* The following clause is a hack to handle the case where
		 GNU C++ reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
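
/* To illustrate the two-pass ordering above (pseudo numbers are
   hypothetical): for a store such as

     (set (mem:SI (reg:SI 70)) (reg:SI 71))

   where pseudo 70 is known to equal (plus (reg fp) (const_int -4)) and
   pseudo 71 is known to equal (const_int 0), the address-only pass can
   fold the address even on a machine whose store pattern rejects a
   constant source; the second pass then tries the source by itself, and
   apply_change_group discards any change that leaves the insn
   unrecognizable.  */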

/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
{
  rtx x = *loc;
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (MEM_P (XEXP (x, 0)))
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;

	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && REG_P (SUBREG_REG (x)))
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map, 0);
	  new = simplify_gen_subreg (GET_MODE (x), inner,
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));

	  if (new)
	    validate_change (insn, loc, new, 1);
	  else
	    cancel_changes (num_changes);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;
	enum machine_mode compare_mode = VOIDmode;

	/* If SET_SRC is a COMPARE which subst_constants would turn into
	   COMPARE of 2 VOIDmode constants, note the mode in which comparison
	   is to be done.  */
	if (GET_CODE (SET_SRC (x)) == COMPARE)
	  {
	    src = SET_SRC (x);
	    if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		|| CC0_P (dest))
	      {
		compare_mode = GET_MODE (XEXP (src, 0));
		if (compare_mode == VOIDmode)
		  compare_mode = GET_MODE (XEXP (src, 1));
	      }
	  }

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (MEM_P (*dest_loc))
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider mode.
	   In that case, we are really setting the underlying register to the
	   source converted to the mode of DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (REG_P (src)
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && REG_P (XEXP (src, 0))
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
		|| CC0_P (dest)
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a
	       COMPARE it will cause us to save the COMPARE with any
	       constants substituted, which is what we want for later.  */
	    rtx src_copy = copy_rtx (src);
	    map->equiv_sets[map->num_sets].equiv = src_copy;
	    map->equiv_sets[map->num_sets++].dest = dest;
	    if (compare_mode != VOIDmode
		&& GET_CODE (src) == COMPARE
		&& (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		    || CC0_P (dest))
		&& GET_MODE (XEXP (src, 0)) == VOIDmode
		&& GET_MODE (XEXP (src, 1)) == VOIDmode)
	      {
		map->compare_src = src_copy;
		map->compare_mode = compare_mode;
	      }
	  }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map, memonly);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 'n':
	case 't':
	case 'B':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

	  break;

	default:
	  abort ();
	}
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case RTX_UNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case RTX_COMPARE:
      case RTX_COMM_COMPARE:
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));

	  new = simplify_relational_operation (code, GET_MODE (x), op_mode,
					       XEXP (x, 0), XEXP (x, 1));
	  break;
	}

      case RTX_BIN_ARITH:
      case RTX_COMM_ARITH:
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case RTX_BITFIELD_OPS:
      case RTX_TERNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	if (code == IF_THEN_ELSE)
	  {
	    rtx op0 = XEXP (x, 0);

	    if (COMPARISON_P (op0)
		&& GET_MODE (op0) == VOIDmode
		&& ! side_effects_p (op0)
		&& XEXP (op0, 0) == map->compare_src
		&& GET_MODE (XEXP (op0, 1)) == VOIDmode)
	      {
		/* We have compare of two VOIDmode constants for which
		   we recorded the comparison mode.  */
		rtx tem =
		  simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
					   map->compare_mode, XEXP (op0, 0),
					   XEXP (op0, 1));

		if (GET_CODE (tem) != CONST_INT)
		  new = simplify_ternary_operation (code, GET_MODE (x),
						    op0_mode, tem, XEXP (x, 1),
						    XEXP (x, 2));
		else if (tem == const0_rtx)
		  new = XEXP (x, 2);
		else
		  new = XEXP (x, 1);
	      }
	  }

	if (!new)
	  new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					    XEXP (x, 0), XEXP (x, 1),
					    XEXP (x, 2));
	break;

      default:
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}

/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

static void
mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (REG_P (dest))
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      regno = REGNO (SUBREG_REG (dest));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				      GET_MODE (SUBREG_REG (dest)),
				      SUBREG_BYTE (dest),
				      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
			       : uregno + hard_regno_nregs[uregno][mode] - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && uregno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = uregno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}

/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (tree stmt)
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (tree decl)
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}

/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree local_decl;
  tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (tree decl, int setting)
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}

/* Functions to keep track of the values hard regs had at the start of
   the function.  */

rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
      return ivs->entries[i].pseudo;

  return NULL_RTX;
}

rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  rtx rv = has_func_hard_reg_initial_val (fun, reg);

  if (rv)
    return rv;

  if (ivs == 0)
    {
      fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
      ivs = fun->hard_reg_initial_vals;
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
				  ivs->max_entries
				  * sizeof (initial_value_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = reg;
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));

  return ivs->entries[ivs->num_entries++].pseudo;
}

rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}
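
/* A typical use (a sketch; REGNO_OF_INTEREST stands in for some hard
   register number valid on the target):

     rtx entry_val = get_hard_reg_initial_val (Pmode, REGNO_OF_INTEREST);

   Repeated calls with the same mode and register return the same
   pseudo, and emit_initial_value_sets below later emits the
   pseudo <- hard-reg copies at the entry of the function.  */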

void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, entry_of_function ());
}

/* If the backend knows where to allocate pseudos for hard
   register initial values, register these allocations now.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
	; /* Do nothing.  */
      else if (MEM_P (x))
	reg_equiv_memory_loc[regno] = x;
      else if (REG_P (x))
	{
	  reg_renumber[regno] = REGNO (x);
	  /* Poke the regno right into regno_reg_rtx
	     so that even fixed regs are accepted.  */
	  REGNO (ivs->entries[i].pseudo) = REGNO (x);
	}
      else
	abort ();
    }
#endif
}

#include "gt-integrate.h"