/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
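
/* Editorial sketch, not part of the original file: CEIL_ROUND assumes
   ALIGN is a power of two, since it masks with ~(ALIGN - 1).  A quick
   self-check of the arithmetic:  */
#if 0
static void
ceil_round_example (void)
{
  /* (13 + 8 - 1) & ~(8 - 1) == 20 & ~7 == 16; an aligned value stays put.  */
  if (CEIL_ROUND (13, 8) != 16 || CEIL_ROUND (16, 8) != 16)
    abort ();
}
#endif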

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
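
/* Editorial sketch, not part of the original file: because of the lazy
   allocation above, callers never have to pre-populate label_map.  A
   hypothetical caller remapping a jump target might look like this:  */
#if 0
static rtx
remap_jump_target_example (struct inline_remap *map, rtx label)
{
  /* The first call for a given CODE_LABEL_NUMBER allocates the
     replacement label; later calls return the same rtx.  */
  return get_label_from_map (map, CODE_LABEL_NUMBER (label));
}
#endif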

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}
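
/* Editorial note, not part of the original file: DECL_ATTRIBUTES is a
   TREE_LIST whose TREE_PURPOSE is the attribute name (an IDENTIFIER_NODE),
   which is what is_attribute_p compares against the names in the target's
   attribute_table.  Only when some attribute matches does the target hook
   get the final say on inlinability.  */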

/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type = TREE_TYPE (decl);

      /* For a parameter or result, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	{
	  TREE_ADDRESSABLE (copy) = 0;
	}
    }

  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
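
/* Editorial sketch, not part of the original file: a hypothetical caller
   remapping one local variable of an inlined callee into the caller.  */
#if 0
static tree
remap_one_local_example (tree var, tree caller_fndecl, tree callee_fndecl)
{
  /* VAR lived in CALLEE_FNDECL; the copy is placed in CALLER_FNDECL's
     scope (unless it is global, static, or from another scope).  */
  return copy_decl_for_inlining (var, callee_fndecl, caller_fndecl);
}
#endif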

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size
		= get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location had better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else
	    return orig;

	  abort ();
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
				  GET_MODE (SUBREG_REG (orig)),
				  SUBREG_BYTE (orig));

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movmemhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
	break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't want
	 to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
	  ASM_OPERANDS_SOURCE_LOCATION (copy)
	    = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	{
	  rtx copy
	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map, 0));

	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

	  return
	    gen_rtx_CALL (GET_MODE (orig), copy,
			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
	}
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
							 map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
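
/* Editorial sketch, not part of the original file: a typical (hypothetical)
   caller copies an insn's pattern through the map, emits it, and then lets
   try_constants validate any substitutions.  Setting up the inline_remap
   (reg_map, label_map, insn_map, ...) is elided here.  */
#if 0
static rtx
copy_one_insn_example (rtx insn, struct inline_remap *map)
{
  rtx pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
  rtx new_insn = emit_insn (pattern);

  /* Substitute known constants and record any new equivalences.  */
  try_constants (new_insn, map);
  return new_insn;
}
#endif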

/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* The following clause is a hack to make the case work where
		 GNU C++ reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
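
/* Editorial note, not part of the original file: the two-pass calls above
   rely on validate_change/apply_change_group from recog.c; changes are
   queued with in_group = 1 and only installed if the whole group keeps the
   insn recognizable.  A sketch of that protocol in isolation:  */
#if 0
static void
grouped_change_example (rtx insn, rtx *loc, rtx new_rtx)
{
  validate_change (insn, loc, new_rtx, 1);  /* Queue, don't apply.  */
  apply_change_group ();                    /* Install if still valid.  */
}
#endif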

/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
{
  rtx x = *loc;
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (MEM_P (XEXP (x, 0)))
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;

	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && REG_P (SUBREG_REG (x)))
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map, 0);
	  new = simplify_gen_subreg (GET_MODE (x), inner,
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));

	  if (new)
	    validate_change (insn, loc, new, 1);
	  else
	    cancel_changes (num_changes);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;
	enum machine_mode compare_mode = VOIDmode;

	/* If SET_SRC is a COMPARE which subst_constants would turn into
	   COMPARE of 2 VOIDmode constants, note the mode in which comparison
	   is to be done.  */
	if (GET_CODE (SET_SRC (x)) == COMPARE)
	  {
	    src = SET_SRC (x);
	    if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		|| CC0_P (dest))
	      {
		compare_mode = GET_MODE (XEXP (src, 0));
		if (compare_mode == VOIDmode)
		  compare_mode = GET_MODE (XEXP (src, 1));
	      }
	  }

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (MEM_P (*dest_loc))
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* Check for the case of DEST being a SUBREG, where both it and the
	   underlying register are no larger than one word, and the SUBREG
	   has the wider mode.  In that case, we are really setting the
	   underlying register to the source converted to the mode of DEST.
	   So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (REG_P (src)
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && REG_P (XEXP (src, 0))
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
		|| CC0_P (dest)
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
	       it will cause us to save the COMPARE with any constants
	       substituted, which is what we want for later.  */
	    rtx src_copy = copy_rtx (src);
	    map->equiv_sets[map->num_sets].equiv = src_copy;
	    map->equiv_sets[map->num_sets++].dest = dest;
	    if (compare_mode != VOIDmode
		&& GET_CODE (src) == COMPARE
		&& (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		    || CC0_P (dest))
		&& GET_MODE (XEXP (src, 0)) == VOIDmode
		&& GET_MODE (XEXP (src, 1)) == VOIDmode)
	      {
		map->compare_src = src_copy;
		map->compare_mode = compare_mode;
	      }
	  }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map, memonly);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 'n':
	case 't':
	case 'B':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

	  break;

	default:
	  abort ();
	}
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case RTX_UNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case RTX_COMPARE:
      case RTX_COMM_COMPARE:
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));

	  new = simplify_relational_operation (code, GET_MODE (x), op_mode,
					       XEXP (x, 0), XEXP (x, 1));
	  break;
	}

      case RTX_BIN_ARITH:
      case RTX_COMM_ARITH:
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case RTX_BITFIELD_OPS:
      case RTX_TERNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	if (code == IF_THEN_ELSE)
	  {
	    rtx op0 = XEXP (x, 0);

	    if (COMPARISON_P (op0)
		&& GET_MODE (op0) == VOIDmode
		&& ! side_effects_p (op0)
		&& XEXP (op0, 0) == map->compare_src
		&& GET_MODE (XEXP (op0, 1)) == VOIDmode)
	      {
		/* We have a compare of two VOIDmode constants for which
		   we recorded the comparison mode.  */
		rtx tem =
		  simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
					   map->compare_mode, XEXP (op0, 0),
					   XEXP (op0, 1));

		if (GET_CODE (tem) != CONST_INT)
		  new = simplify_ternary_operation (code, GET_MODE (x),
						    op0_mode, tem, XEXP (x, 1),
						    XEXP (x, 2));
		else if (tem == const0_rtx)
		  new = XEXP (x, 2);
		else
		  new = XEXP (x, 1);
	      }
	  }
	if (!new)
	  new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					    XEXP (x, 0), XEXP (x, 1),
					    XEXP (x, 2));
	break;

      default:
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}

/* Show that the registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

static void
mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (REG_P (dest))
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      regno = REGNO (SUBREG_REG (dest));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				      GET_MODE (SUBREG_REG (dest)),
				      SUBREG_BYTE (dest),
				      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
			       : uregno + hard_regno_nregs[uregno][mode] - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && uregno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = uregno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}

/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (tree stmt)
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (tree decl)
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}

/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree local_decl;
  tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (tree decl, int setting)
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
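
/* Editorial sketch, not part of the original file: debug back ends that
   emit an abstract instance of an inline function typically bracket the
   emission with these flags, roughly like this (function name is
   hypothetical).  */
#if 0
static void
emit_abstract_instance_example (tree fndecl)
{
  set_decl_abstract_flags (fndecl, 1);
  /* ... describe FNDECL's abstract instance in the debug info ...  */
  set_decl_abstract_flags (fndecl, 0);
}
#endif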

/* Functions to keep track of the values hard regs had at the start of
   the function.  */

rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
      return ivs->entries[i].pseudo;

  return NULL_RTX;
}

rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  rtx rv = has_func_hard_reg_initial_val (fun, reg);

  if (rv)
    return rv;

  if (ivs == 0)
    {
      fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
      ivs = fun->hard_reg_initial_vals;
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
				  ivs->max_entries
				  * sizeof (initial_value_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = reg;
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));

  return ivs->entries[ivs->num_entries++].pseudo;
}

rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}
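
/* Editorial sketch, not part of the original file: a back end wanting the
   value a hard register had on entry asks for a pseudo up front and uses
   it freely; emit_initial_value_sets later emits the copy at the start of
   the function.  The register number below is purely hypothetical.  */
#if 0
static rtx
entry_value_example (void)
{
  return get_hard_reg_initial_val (Pmode, 14 /* hypothetical regno */);
}
#endif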

void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, entry_of_function ());
}

/* If the backend knows where to allocate pseudos for hard
   register initial values, register these allocations now.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
	; /* Do nothing.  */
      else if (MEM_P (x))
	reg_equiv_memory_loc[regno] = x;
      else if (REG_P (x))
	{
	  reg_renumber[regno] = REGNO (x);
	  /* Poke the regno right into regno_reg_rtx
	     so that even fixed regs are accepted.  */
	  REGNO (ivs->entries[i].pseudo) = REGNO (x);
	}
      else
	abort ();
    }
#endif
}

#include "gt-integrate.h"