/* Perform simple optimizations to clean up the result of reload.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"

#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"

#include "reload.h"
#include "cselib.h"
#include "tree-pass.h"
#include "dbgcnt.h"
static int reload_cse_noop_set_p (rtx);
static bool reload_cse_simplify (rtx_insn *, rtx);
static void reload_cse_regs_1 (void);
static int reload_cse_simplify_set (rtx, rtx_insn *);
static int reload_cse_simplify_operands (rtx_insn *, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx_insn *);
static void move2add_note_store (rtx, const_rtx, void *);
/* Call cse / combine like post-reload optimization phases.
   FIRST is the first instruction.  */

static void
reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
{
  bool moves_converted;
  reload_cse_regs_1 ();
  reload_combine ();
  moves_converted = reload_cse_move2add (first);
  if (flag_expensive_optimizations)
    {
      if (moves_converted)
        reload_combine ();
      reload_cse_regs_1 ();
    }
}
/* See whether a single set SET is a noop.  */

static int
reload_cse_noop_set_p (rtx set)
{
  if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
    return 0;

  return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
}
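/* For instance, when reload has assigned the same hard register to two
   different pseudos and one is then copied to the other, the resulting
       (set (reg:SI 1) (reg:SI 1))
   satisfies the check above: cselib proves source and destination equal,
   so the set is a no-op.  (The mode and register number here are purely
   illustrative.)  */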
/* Try to simplify INSN.  Return true if the CFG may have changed.  */

static bool
reload_cse_simplify (rtx_insn *insn, rtx testreg)
{
  rtx body = PATTERN (insn);
  basic_block insn_bb = BLOCK_FOR_INSN (insn);
  unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);

  if (GET_CODE (body) == SET)
    {
      int count = 0;

      /* Simplify even if we may think it is a no-op.
         We may think a memory load of a value smaller than WORD_SIZE
         is redundant because we haven't taken into account possible
         implicit extension.  reload_cse_simplify_set() will bring
         this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
        {
          if (check_for_inc_dec (insn))
            delete_insn_and_edges (insn);
          /* We're done with this insn.  */
          goto done;
        }

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
         within the body of the asm.  Invalidate those registers now so that
         we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
        {
          for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
            {
              rtx part = XVECEXP (body, 0, i);
              if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
                cselib_invalidate_rtx (XEXP (part, 0));
            }
        }

      /* If every action in a PARALLEL is a noop, we can delete
         the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx part = XVECEXP (body, 0, i);
          if (GET_CODE (part) == SET)
            {
              if (! reload_cse_noop_set_p (part))
                break;
              if (REG_P (SET_DEST (part))
                  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
                {
                  if (value)
                    break;
                  value = SET_DEST (part);
                }
            }
          else if (GET_CODE (part) != CLOBBER
                   && GET_CODE (part) != USE)
            break;
        }

      if (i < 0)
        {
          if (check_for_inc_dec (insn))
            delete_insn_and_edges (insn);
          /* We're done with this insn.  */
          goto done;
        }

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        if (GET_CODE (XVECEXP (body, 0, i)) == SET)
          count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }

done:
  return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
}
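/* Note for the function above: deleting an insn via delete_insn_and_edges
   can remove an EH edge when the insn was a potentially-trapping memory
   access, so comparing the basic block's successor count before and after
   is how we detect that the CFG changed.  */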
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */
static void
reload_cse_regs_1 (void)
{
  bool cfg_changed = false;
  basic_block bb;
  rtx_insn *insn;
  rtx testreg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
          cfg_changed |= reload_cse_simplify (insn, testreg);

        cselib_process_insn (insn);
      }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
  if (cfg_changed)
    cleanup_cfg (0);
}
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */

static int
reload_cse_simplify_set (rtx set, rtx_insn *insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  reg_class_t dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
  enum rtx_code extend_op = UNKNOWN;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
  if (MEM_P (src)
      && (extend_op = load_extend_op (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
                                   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = set_src_cost (src, GET_MODE (SET_DEST (set)), speed);

  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
        {
          if (extend_op != UNKNOWN)
            {
              wide_int result;

              if (!CONST_SCALAR_INT_P (this_rtx))
                continue;

              switch (extend_op)
                {
                case ZERO_EXTEND:
                  result = wide_int::from (rtx_mode_t (this_rtx,
                                                       GET_MODE (src)),
                                           BITS_PER_WORD, UNSIGNED);
                  break;
                case SIGN_EXTEND:
                  result = wide_int::from (rtx_mode_t (this_rtx,
                                                       GET_MODE (src)),
                                           BITS_PER_WORD, SIGNED);
                  break;
                default:
                  gcc_unreachable ();
                }
              this_rtx = immed_wide_int_const (result, word_mode);
            }

          this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
        }
      else if (REG_P (this_rtx))
        {
          if (extend_op != UNKNOWN)
            {
              this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
              this_cost = set_src_cost (this_rtx, word_mode, speed);
            }
          else
            this_cost = register_move_cost (GET_MODE (this_rtx),
                                            REGNO_REG_CLASS (REGNO (this_rtx)),
                                            dclass);
        }
      else
        continue;

      /* If equal costs, prefer registers over anything else.  That
         tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
          || (this_cost == old_cost
              && REG_P (this_rtx)
              && !REG_P (SET_SRC (set))))
        {
          if (extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
              && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                            word_mode,
                                            REGNO_REG_CLASS (REGNO (SET_DEST (set))))
#endif
              )
            {
              rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
              ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
              validate_change (insn, &SET_DEST (set), wide_dest, 1);
            }

          validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
          old_cost = this_cost, did_change = 1;
        }
    }

  return did_change;
}
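/* An illustrative case for reload_cse_simplify_set: given
       (set (reg:SI 0) (const_int 42))
   where cselib records that some hard register, say (reg:SI 3), already
   holds 42, and the register-register move costs less than the constant
   load, the source is replaced and the insn becomes
       (set (reg:SI 0) (reg:SI 3)).
   (Register numbers here are purely illustrative.)  */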
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */

static int
reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_constrain_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
         right, so avoid the problem here.  Likewise if we have a constant
         and the insn pattern doesn't tell us the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
          || (CONSTANT_P (recog_data.operand[i])
              && recog_data.operand_mode[i] == VOIDmode))
        continue;

      op = recog_data.operand[i];
      if (MEM_P (op) && load_extend_op (GET_MODE (op)) != UNKNOWN)
        {
          rtx set = single_set (insn);

          /* We might have multiple sets, some of which do implicit
             extension.  Punt on this for now.  */
          if (! set)
            continue;
          /* If the destination is also a MEM or a STRICT_LOW_PART, no
             extension applies.
             Also, if there is an explicit extension, we don't have to
             worry about an implicit one.  */
          else if (MEM_P (SET_DEST (set))
                   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
                   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
                   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
            ; /* Continue ordinary processing.  */
#ifdef CANNOT_CHANGE_MODE_CLASS
          /* If the register cannot change mode to word_mode, it follows that
             it cannot have been used in word_mode.  */
          else if (REG_P (SET_DEST (set))
                   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                                word_mode,
                                                REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
            ; /* Continue ordinary processing.  */
#endif
          /* If this is a straight load, make the extension explicit.  */
          else if (REG_P (SET_DEST (set))
                   && recog_data.n_operands == 2
                   && SET_SRC (set) == op
                   && SET_DEST (set) == recog_data.operand[1-i])
            {
              validate_change (insn, recog_data.operand_loc[i],
                               gen_rtx_fmt_e (load_extend_op (GET_MODE (op)),
                                              word_mode, op),
                               1);
              validate_change (insn, recog_data.operand_loc[1-i],
                               gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
                               1);
              if (! apply_change_group ())
                return 0;
              return reload_cse_simplify_operands (insn, testreg);
            }
          else
            /* ??? There might be arithmetic operations with memory that are
               safe to optimize, but is it worth the trouble?  */
            continue;
        }

      if (side_effects_p (op))
        continue;
      v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
      if (! v)
        continue;

      for (l = v->locs; l; l = l->next)
        if (REG_P (l->loc))
          SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }

  alternative_mask preferred = get_preferred_alternatives (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
        op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
         for this operand.  */
      j = 0;
      while (*p != '\0')
        {
          char c = *p++;
          if (c == ',')
            j++;
          else if (c == '?')
            alternative_reject[j] += 3;
          else if (c == '!')
            alternative_reject[j] += 300;
        }

      /* We won't change operands which are already registers.  We
         also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
          || constraints[i][0] == '='
          || constraints[i][0] == '+')
        continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          enum reg_class rclass = NO_REGS;

          if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
            continue;

          set_mode_and_regno (testreg, mode, regno);

          /* We found a register equal to this operand.  Now look for all
             alternatives that can accept this register and have not been
             assigned a register they can use yet.  */
          j = 0;
          p = constraints[i];
          for (;;)
            {
              char c = *p;

              switch (c)
                {
                case 'g':
                  rclass = reg_class_subunion[rclass][GENERAL_REGS];
                  break;

                default:
                  rclass
                    = (reg_class_subunion
                       [rclass]
                       [reg_class_for_constraint (lookup_constraint (p))]);
                  break;

                case ',': case '\0':
                  /* See if REGNO fits this alternative, and set it up as the
                     replacement register if we don't have one for this
                     alternative yet and the operand being replaced is not
                     a cheap CONST_INT.  */
                  if (op_alt_regno[i][j] == -1
                      && TEST_BIT (preferred, j)
                      && reg_fits_class_p (testreg, rclass, 0, mode)
                      && (!CONST_INT_P (recog_data.operand[i])
                          || (set_src_cost (recog_data.operand[i], mode,
                                            optimize_bb_for_speed_p
                                             (BLOCK_FOR_INSN (insn)))
                              > set_src_cost (testreg, mode,
                                              optimize_bb_for_speed_p
                                               (BLOCK_FOR_INSN (insn))))))
                    {
                      alternative_nregs[j]++;
                      op_alt_regno[i][j] = regno;
                    }
                  j++;
                  rclass = NO_REGS;
                  break;
                }
              p += CONSTRAINT_LEN (c, p);

              if (c == '\0')
                break;
            }
        }
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];

      for (j = i + 1; j < recog_data.n_alternatives; j++)
        {
          int this_reject = alternative_reject[alternative_order[j]];
          int this_nregs = alternative_nregs[alternative_order[j]];

          if (this_reject < best_reject
              || (this_reject == best_reject && this_nregs > best_nregs))
            {
              best = j;
              best_reject = this_reject;
              best_nregs = this_nregs;
            }
        }

      std::swap (alternative_order[best], alternative_order[i]);
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
        continue;

      validate_change (insn, recog_data.operand_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
        continue;

      validate_change (insn, recog_data.dup_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}
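/* An illustrative case for reload_cse_simplify_operands: in
       (set (reg:SI 0) (plus:SI (reg:SI 0) (mem:SI (reg:SI 1))))
   where cselib knows the value of the MEM operand already lives in
   (reg:SI 2), and an alternative of the add pattern accepts a register
   for that operand, the MEM is replaced by (reg:SI 2), much as an
   optional reload would have done.  */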
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */

/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16

/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx_insn *insn;

  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;

  /* Location of the register within INSN.  */
  rtx *usep;

  /* The reverse uid of the insn.  */
  int ruid;
};

/* If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.  */
static struct
  {
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    rtx offset;
    int use_index;
    int store_ruid;
    int real_store_ruid;
    int use_ruid;
    bool all_offsets_match;
  } reg_state[FIRST_PSEUDO_REGISTER];
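/* Note that ruids count upwards as reload_combine scans backwards, so an
   insn that comes earlier in the insn stream has a higher ruid.  "Last
   encountered" in the comment above therefore means the insn closest
   after the current scan position in stream order.  */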
/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;
#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])

/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  if (*pruid > split_ruid)
    (*pruid)++;
}
/* Called when we insert a new insn in a position we've already passed in
   the scan.  Examine all our state, increasing all ruids that are higher
   than SPLIT_RUID by one in order to make room for a new insn.  */

static void
reload_combine_split_ruids (int split_ruid)
{
  unsigned i;

  reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, idx = reg_state[i].use_index;
      reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
                                     split_ruid);
      if (idx < 0)
        continue;
      for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
        reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
                                       split_ruid);
    }
}
/* Called when we are about to rescan a previously encountered insn with
   reload_combine_note_use after modifying some part of it.  This clears all
   information about uses in that particular insn.  */

static void
reload_combine_purge_insn_uses (rtx_insn *insn)
{
  unsigned i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, k, idx = reg_state[i].use_index;
      if (idx < 0)
        continue;
      j = k = RELOAD_COMBINE_MAX_USES;
      while (j-- > idx)
        {
          if (reg_state[i].reg_use[j].insn != insn)
            {
              k--;
              if (k != j)
                reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
            }
        }
      reg_state[i].use_index = k;
    }
}
/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    {
      if (reg_state[regno].reg_use[j].ruid >= ruid)
        {
          k--;
          if (k != j)
            reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
        }
    }
  reg_state[regno].use_index = k;
}
/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
        continue;
      if (this_ruid > best_ruid)
        {
          best_ruid = this_ruid;
          retval = use;
        }
      else if (this_ruid == best_ruid)
        retval = NULL;
    }
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}
/* After we've moved an add insn, fix up any debug insns that occur
   between the old location of the add and the new location.  REG is
   the destination register of the add insn; REPLACEMENT is the
   SET_SRC of the add.  FROM and TO specify the range in which we
   should make this change on debug insns.  */

static void
fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn;
  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      rtx t;

      if (!DEBUG_INSN_P (insn))
        continue;

      t = INSN_VAR_LOCATION_LOC (insn);
      t = simplify_replace_rtx (t, reg, replacement);
      validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
    }
}
/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */

static bool
try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
{
  rtx_insn *use_insn = use->insn;
  rtx mem = use->containing_mem;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

  if (mem != NULL_RTX)
    {
      addr_space_t as = MEM_ADDR_SPACE (mem);
      rtx oldaddr = XEXP (mem, 0);
      rtx newaddr = NULL_RTX;
      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
      int new_cost;

      newaddr = simplify_replace_rtx (oldaddr, reg, src);
      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
        {
          XEXP (mem, 0) = newaddr;
          new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
          XEXP (mem, 0) = oldaddr;
          if (new_cost <= old_cost
              && validate_change (use_insn,
                                  &XEXP (mem, 0), newaddr, 0))
            return true;
        }
    }
  else
    {
      rtx new_set = single_set (use_insn);
      if (new_set
          && REG_P (SET_DEST (new_set))
          && GET_CODE (SET_SRC (new_set)) == PLUS
          && REG_P (XEXP (SET_SRC (new_set), 0))
          && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
        {
          rtx new_src;
          machine_mode mode = GET_MODE (SET_DEST (new_set));
          int old_cost = set_src_cost (SET_SRC (new_set), mode, speed);

          gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
          new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);

          if (set_src_cost (new_src, mode, speed) <= old_cost
              && validate_change (use_insn, &SET_SRC (new_set),
                                  new_src, 0))
            return true;
        }
    }
  return false;
}
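/* For example, with REG = (reg:SI 0) and SRC = (plus:SI (reg:SI 1)
   (const_int 4)), a use inside (mem:SI (reg:SI 0)) can become
       (mem:SI (plus:SI (reg:SI 1) (const_int 4)))
   provided the new address is valid and no more expensive than the
   old one.  */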
/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */

static bool
reload_combine_recognize_const_pattern (rtx_insn *insn)
{
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx_insn *add_moved_after_insn = NULL;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || REG_NREGS (reg) != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  addreg = XEXP (src, 0);
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
         single_set there may also be clobbers.  Recognize one special
         case, that of one clobber alongside the set (likely a clobber
         of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
          || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
          || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
        return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
        /* Start the search for the next use from here.  */
        from_ruid = use->ruid;

      if (use && GET_MODE (*use->usep) == Pmode)
        {
          bool delete_add = false;
          rtx_insn *use_insn = use->insn;
          int use_ruid = use->ruid;

          /* Avoid moving the add insn past a jump.  */
          if (must_move_add && use_ruid <= last_jump_ruid)
            break;

          /* If the add clobbers another hard reg in parallel, don't move
             it past a real set of this hard reg.  */
          if (must_move_add && clobbered_regno >= 0
              && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
            break;

          /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
          if (HAVE_cc0 && must_move_add && sets_cc0_p (PATTERN (use_insn)))
            break;

          gcc_assert (reg_state[regno].store_ruid <= use_ruid);
          /* Avoid moving a use of ADDREG past a point where it is stored.  */
          if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
            break;

          /* We also must not move the addition past an insn that sets
             the same register, unless we can combine two add insns.  */
          if (must_move_add && reg_state[regno].store_ruid == use_ruid)
            {
              if (use->containing_mem == NULL_RTX)
                delete_add = true;
              else
                break;
            }

          if (try_replace_in_use (use, reg, src))
            {
              reload_combine_purge_insn_uses (use_insn);
              reload_combine_note_use (&PATTERN (use_insn), use_insn,
                                       use_ruid, NULL_RTX);

              if (delete_add)
                {
                  fixup_debug_insns (reg, src, insn, use_insn);
                  delete_insn (insn);
                  return true;
                }
              if (must_move_add)
                {
                  add_moved_after_insn = use_insn;
                  add_moved_after_ruid = use_ruid;
                }
              continue;
            }
        }
      /* If we get here, we couldn't handle this use.  */
      if (must_move_add)
        break;
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (reg, src, insn, add_moved_after_insn);

  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
                           add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}
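/* A typical sequence handled by the function above:
       (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 4)))
       ...
       (set (mem:SI (reg:SI 0)) (reg:SI 2))
   The constant addition is folded into the use,
       (set (mem:SI (plus:SI (reg:SI 0) (const_int 4))) (reg:SI 2))
   and, because REG appears in its own source (must_move_add), the add
   insn is reordered after its last merged use.  */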
/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */

static bool
reload_combine_recognize_pattern (rtx_insn *insn)
{
  rtx set, reg, src;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg) || REG_NREGS (reg) != 1)
    return false;

  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);

  if (reg_state[regno].use_index < 0
      || reg_state[regno].use_index >= RELOAD_COMBINE_MAX_USES)
    return false;

  for (int i = reg_state[regno].use_index;
       i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      if (GET_MODE (*use->usep) != mode)
        return false;
    }

  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
         REGZ in the illustration above) and REG_SUM to the expression
         register+register that we want to use to substitute uses of REG
         (typically in MEMs) with.  First check REG and BASE for being
         index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
          || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
                                REGNO (base)))
        {
          index_reg = reg;
          reg_sum = src;
        }
      else
        {
          /* Otherwise, look for a free index register.  Since we have
             checked above that neither REG nor BASE are index registers,
             if we find anything at all, it will be different from these
             two registers.  */
          for (i = first_index_reg; i <= last_index_reg; i++)
            {
              if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
                  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
                  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
                  && (call_used_regs[i] || df_regs_ever_live_p (i))
                  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
                  && !fixed_regs[i] && !global_regs[i]
                  && hard_regno_nregs[i][GET_MODE (reg)] == 1
                  && targetm.hard_regno_scratch_ok (i))
                {
                  index_reg = gen_rtx_REG (GET_MODE (reg), i);
                  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
                  break;
                }
            }
        }

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
         (REGY), i.e. BASE, is not clobbered before the last use we'll
         create.  */
      if (reg_sum
          && prev_set
          && CONST_INT_P (SET_SRC (prev_set))
          && rtx_equal_p (SET_DEST (prev_set), reg)
          && (reg_state[REGNO (base)].store_ruid
              <= reg_state[regno].use_ruid))
        {
          /* Change destination register and, if necessary, the constant
             value in PREV, the constant loading instruction.  */
          validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
          if (reg_state[regno].offset != const0_rtx)
            validate_change (prev,
                             &SET_SRC (prev_set),
                             GEN_INT (INTVAL (SET_SRC (prev_set))
                                      + INTVAL (reg_state[regno].offset)),
                             1);

          /* Now for every use of REG that we have recorded, replace REG
             with REG_SUM.  */
          for (i = reg_state[regno].use_index;
               i < RELOAD_COMBINE_MAX_USES; i++)
            validate_unshare_change (reg_state[regno].reg_use[i].insn,
                                     reg_state[regno].reg_use[i].usep,
                                     /* Each change must have its own
                                        replacement.  */
                                     reg_sum, 1);

          if (apply_change_group ())
            {
              struct reg_use *lowest_ruid = NULL;

              /* For every new use of REG_SUM, we have to record the use
                 of BASE therein, i.e. operand 1.  */
              for (i = reg_state[regno].use_index;
                   i < RELOAD_COMBINE_MAX_USES; i++)
                {
                  struct reg_use *use = reg_state[regno].reg_use + i;
                  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
                                           use->ruid, use->containing_mem);
                  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
                    lowest_ruid = use;
                }

              fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

              /* Delete the reg-reg addition.  */
              delete_insn (insn);

              if (reg_state[regno].offset != const0_rtx
                  /* Previous REG_EQUIV / REG_EQUAL notes for PREV
                     are now invalid.  */
                  && remove_reg_equal_equiv_notes (prev))
                df_notes_rescan (prev);

              reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
              return true;
            }
        }
    }
  return false;
}
static void
reload_combine (void)
{
  rtx_insn *insn, *prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
        if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
          {
            if (first_index_reg == -1)
              first_index_reg = r;

            last_index_reg = r;
          }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
         to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
        {
          last_index_reg = -1;
          return;
        }
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
        {
          HARD_REG_SET live;
          bitmap live_in = df_get_live_in (bb);

          REG_SET_TO_HARD_REG_SET (live, live_in);
          compute_use_by_pseudos (&live, live_in);
          COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
          IOR_HARD_REG_SET (ever_live_at_start, live);
        }
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
        reg_state[r].use_index = -1;
      else
        reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
         information we have would be costly, so we just note where the label
         is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
        last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
        {
          /* Crossing a barrier resets all the use information.  */
          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (! fixed_regs[r])
              reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
        }
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
        /* Optimizations across insns being marked as volatile must be
           prevented.  All the usage information is invalidated
           here.  */
        for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
          if (! fixed_regs[r]
              && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
            reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
        continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
        last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
          || reload_combine_recognize_pattern (insn))
        continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
        {
          rtx link;
          HARD_REG_SET used_regs;

          get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);

          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (TEST_HARD_REG_BIT (used_regs, r))
              {
                reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
                reg_state[r].store_ruid = reload_combine_ruid;
              }

          for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
               link = XEXP (link, 1))
            {
              rtx setuse = XEXP (link, 0);
              rtx usage_rtx = XEXP (setuse, 0);
              if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
                  && REG_P (usage_rtx))
                {
                  unsigned int end_regno = END_REGNO (usage_rtx);
                  for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
                    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
                      {
                        reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
                        reg_state[i].store_ruid = reload_combine_ruid;
                      }
                    else
                      reg_state[i].use_index = -1;
                }
            }
        }

      if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
        {
          /* Non-spill registers might be used at the call destination in
             some unknown fashion, so we have to mark the unknown use.  */
          HARD_REG_SET *live;

          if ((condjump_p (insn) || condjump_in_parallel_p (insn))
              && JUMP_LABEL (insn))
            {
              if (ANY_RETURN_P (JUMP_LABEL (insn)))
                live = NULL;
              else
                live = &LABEL_LIVE (JUMP_LABEL (insn));
            }
          else
            live = &ever_live_at_start;

          if (live)
            for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
              if (TEST_HARD_REG_BIT (*live, r))
                reg_state[r].use_index = -1;
        }

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
                               NULL_RTX);

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
            {
              int regno = REGNO (XEXP (note, 0));
              reg_state[regno].store_ruid = reload_combine_ruid;
              reg_state[regno].real_store_ruid = reload_combine_ruid;
              reg_state[regno].use_index = -1;
            }
        }
    }

  free (label_live);
}
/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
                                   GET_MODE (SUBREG_REG (dst)),
                                   SUBREG_BYTE (dst),
                                   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
          || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
        {
          unsigned int end_regno = END_REGNO (XEXP (dst, 0));
          for (unsigned int i = REGNO (XEXP (dst, 0)); i < end_regno; ++i)
            {
              /* We could probably do better, but for now mark the register
                 as used in an unknown fashion and set/clobbered at this
                 insn.  */
              reg_state[i].use_index = -1;
              reg_state[i].store_ruid = reload_combine_ruid;
              reg_state[i].real_store_ruid = reload_combine_ruid;
            }
        }
      else
        return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].use_index = -1;
          reg_state[i].store_ruid = reload_combine_ruid;
          reg_state[i].real_store_ruid = reload_combine_ruid;
        }
    }
  else
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].store_ruid = reload_combine_ruid;
          if (GET_CODE (set) == SET)
            reg_state[i].real_store_ruid = reload_combine_ruid;
          reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
        }
    }
}
/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */

static void
reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
{
  rtx x = *xp;
  enum rtx_code code = x->code;
  const char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      if (REG_P (SET_DEST (x)))
        {
          reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
          return;
        }
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
        {
          /* Mark the return register as used in an unknown fashion.  */
          rtx reg = XEXP (x, 0);
          unsigned int end_regno = END_REGNO (reg);
          for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
            reg_state[regno].use_index = -1;
          return;
        }
      break;

    case CLOBBER:
      if (REG_P (SET_DEST (x)))
        {
          /* No spurious CLOBBERs of pseudo registers may remain.  */
          gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
          return;
        }
      break;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)) .  */
      if (!REG_P (XEXP (x, 0))
          || !CONST_INT_P (XEXP (x, 1)))
        break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
        int regno = REGNO (x);
        int use_index;
        int nregs;

        /* No spurious USEs of pseudo registers may remain.  */
        gcc_assert (regno < FIRST_PSEUDO_REGISTER);

        nregs = REG_NREGS (x);

        /* We can't substitute into multi-hard-reg uses.  */
        if (nregs > 1)
          {
            while (--nregs >= 0)
              reg_state[regno + nregs].use_index = -1;
            return;
          }

        /* We may be called to update uses in previously seen insns.
           Don't add uses beyond the last store we saw.  */
        if (ruid < reg_state[regno].store_ruid)
          return;

        /* If this register is already used in some unknown fashion, we
           can't do anything.
           If we decrement the index from zero to -1, we can't store more
           uses, so this register becomes used in an unknown fashion.  */
        use_index = --reg_state[regno].use_index;
        if (use_index < 0)
          return;

        if (use_index == RELOAD_COMBINE_MAX_USES - 1)
          {
            /* This is the first use of this register we have seen since we
               marked it as dead.  */
            reg_state[regno].offset = offset;
            reg_state[regno].all_offsets_match = true;
            reg_state[regno].use_ruid = ruid;
          }
        else
          {
            if (reg_state[regno].use_ruid > ruid)
              reg_state[regno].use_ruid = ruid;

            if (! rtx_equal_p (offset, reg_state[regno].offset))
              reg_state[regno].all_offsets_match = false;
          }

        reg_state[regno].reg_use[use_index].insn = insn;
        reg_state[regno].reg_use[use_index].ruid = ruid;
        reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
        reg_state[regno].reg_use[use_index].usep = xp;
        return;
      }

    case MEM:
      containing_mem = x;
      break;

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
      else if (fmt[i] == 'E')
        {
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
                                     containing_mem);
        }
    }
}
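/* The use_index bookkeeping above counts downwards: a dead register
   starts at RELOAD_COMBINE_MAX_USES, each recorded use decrements the
   index, and a decrement from zero to -1 (too many uses, or an
   unsuitable one) marks the register as used in an unknown fashion.  */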
/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.
   For a set that established a new (potential) base register with
   non-constant value, we use move2add_luid from the place where the
   setting insn is encountered; registers based off that base then
   get the same reg_set_luid.  Constants all get
   move2add_last_label_luid + 1 as their reg_set_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n] .
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
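/* For example, on a target where truncating from SImode to QImode is a
   no-op, a value recorded in SImode remains usable when the register is
   later referenced in QImode; going from a narrower recorded mode to a
   wider one is always rejected.  */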
/* Record that REG is being set to a value with the mode of REG.  */

static void
move2add_record_mode (rtx reg)
{
  int regno, nregs;
  machine_mode mode = GET_MODE (reg);

  if (GET_CODE (reg) == SUBREG)
    {
      regno = subreg_regno (reg);
      nregs = subreg_nregs (reg);
    }
  else if (REG_P (reg))
    {
      regno = REGNO (reg);
      nregs = REG_NREGS (reg);
    }
  else
    gcc_unreachable ();
  for (int i = nregs - 1; i > 0; i--)
    reg_mode[regno + i] = BLKmode;
  reg_mode[regno] = mode;
}
/* Record that REG is being set to the sum of SYM and OFF.  */

static void
move2add_record_sym_value (rtx reg, rtx sym, rtx off)
{
  int regno = REGNO (reg);

  move2add_record_mode (reg);
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
}
/* Check if REGNO contains a valid value in MODE.  */

static bool
move2add_valid_value_p (int regno, machine_mode mode)
{
  if (reg_set_luid[regno] <= move2add_last_label_luid)
    return false;

  if (mode != reg_mode[regno])
    {
      if (!MODES_OK_FOR_MOVE2ADD (mode, reg_mode[regno]))
        return false;
      /* The value loaded into regno in reg_mode[regno] is also valid in
         mode after truncation only if (REG:mode regno) is the lowpart of
         (REG:reg_mode[regno] regno).  Now, for big endian, the starting
         regno of the lowpart might be different.  */
      int s_off = subreg_lowpart_offset (mode, reg_mode[regno]);
      s_off = subreg_regno_offset (regno, reg_mode[regno], s_off, mode);
      if (s_off != 0)
        /* We could in principle adjust regno, check reg_mode[regno] to be
           BLKmode, and return s_off to the caller (vs. -1 for failure),
           but we currently have no callers that could make use of this
           information.  */
        return false;
    }

  for (int i = hard_regno_nregs[regno][mode] - 1; i > 0; i--)
    if (reg_mode[regno + i] != BLKmode)
      return false;
  return true;
}
/* This function is called with INSN that sets REG to (SYM + OFF),
   while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */

static bool
move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno],
                              GET_MODE (reg));
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
         truncation, that, if turned into (set (reg)
         (reg)), would be discarded.  Maybe we should
         try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset[regno])
        changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
          && have_add2_insn (reg, new_src))
        changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
        {
          machine_mode narrow_mode;
          for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               narrow_mode != VOIDmode
                 && narrow_mode != GET_MODE (reg);
               narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
            {
              if (have_insn_for (STRICT_LOW_PART, narrow_mode)
                  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
                      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
                {
                  rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
                  rtx narrow_src = gen_int_mode (INTVAL (off),
                                                 narrow_mode);
                  rtx new_set
                    = gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
                                                            narrow_reg),
                                   narrow_src);
                  get_full_set_rtx_cost (new_set, &newcst);
                  if (costs_lt_p (&newcst, &oldcst, speed))
                    {
                      changed = validate_change (insn, &PATTERN (insn),
                                                 new_set, 0);
                      if (changed)
                        break;
                    }
                }
            }
        }
    }
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
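/* An illustrative case for the add2 path: if (reg:SI 0) is known to
   hold the constant 100 and INSN is
       (set (reg:SI 0) (const_int 104))
   the insn can be rewritten as
       (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 4)))
   when the addition is cheaper than reloading the wide constant.  */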
/* This function is called with INSN that sets REG to (SYM + OFF),
   but REG doesn't have known value (SYM + offset).  This function
   tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */

static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, GET_MODE (reg))
        && reg_base_reg[i] < 0
        && reg_symbol_ref[i] != NULL_RTX
        && rtx_equal_p (sym, reg_symbol_ref[i]))
      {
        rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
                                    GET_MODE (reg));
        /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
           use (set (reg) (reg)) instead.
           We don't delete this insn, nor do we convert it into a
           note, to avoid losing register notes or the return
           value flag.  jump2 already knows how to get rid of
           no-op moves.  */
        if (new_src == const0_rtx)
          {
            init_costs_to_zero (&mincst);
            min_regno = i;
            break;
          }
        else
          {
            XEXP (plus_expr, 1) = new_src;
            get_full_set_rtx_cost (pat, &newcst);

            if (costs_lt_p (&newcst, &mincst, speed))
              {
                mincst = newcst;
                min_regno = i;
              }
          }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      if (i != min_regno)
        {
          rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
                                      GET_MODE (reg));
          tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
        }
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
        changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
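/* An illustrative case for the add3 path: if (reg:SI 1) is known to
   hold (symbol_ref "x") and INSN is
       (set (reg:SI 0) (const (plus (symbol_ref "x") (const_int 8))))
   the insn can become
       (set (reg:SI 0) (plus:SI (reg:SI 1) (const_int 8)))
   when that is cheaper than materializing the symbolic constant.  */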
/* Convert move insns with constant inputs to additions if they are cheaper.
   Return true if any changes were made.  */

static bool
reload_cse_move2add (rtx_insn *first)
{
  int i;
  rtx_insn *insn;
  bool changed = false;

  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
    {
      reg_set_luid[i] = 0;
      reg_offset[i] = 0;
      reg_base_reg[i] = 0;
      reg_symbol_ref[i] = NULL_RTX;
      reg_mode[i] = VOIDmode;
    }

  move2add_last_label_luid = 0;
  move2add_luid = 2;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (LABEL_P (insn))
        {
          move2add_last_label_luid = move2add_luid;
          /* We're going to increment move2add_luid twice after a
             label, so that we can use move2add_last_label_luid + 1 as
             the luid for constants.  */
          move2add_luid++;
          continue;
        }
      if (! INSN_P (insn))
        continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
         straightforward SETs.  */
      if (GET_CODE (pat) == SET
          && REG_P (SET_DEST (pat)))
        {
          rtx reg = SET_DEST (pat);
          int regno = REGNO (reg);
          rtx src = SET_SRC (pat);

          /* Check if we have valid information on the contents of this
             register in the mode of REG.  */
          if (move2add_valid_value_p (regno, GET_MODE (reg))
              && dbg_cnt (cse2_move2add))
            {
              /* Try to transform (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (CONST_INT B))
                 to
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))
                 or
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
              */

              if (CONST_INT_P (src)
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] == NULL_RTX)
                {
                  changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
                  continue;
                }

              /* Try to transform (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT B)))
                 to
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (REGY))
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
              else if (REG_P (src)
                       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
                       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
                       && move2add_valid_value_p (REGNO (src), GET_MODE (reg)))
                {
                  rtx_insn *next = next_nonnote_nondebug_insn (insn);
                  rtx set = NULL_RTX;
                  if (next)
                    set = single_set (next);
                  if (set
                      && SET_DEST (set) == reg
                      && GET_CODE (SET_SRC (set)) == PLUS
                      && XEXP (SET_SRC (set), 0) == reg
                      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
                    {
                      rtx src3 = XEXP (SET_SRC (set), 1);
                      unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
                      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
                      HOST_WIDE_INT regno_offset = reg_offset[regno];
                      rtx new_src =
                        gen_int_mode (added_offset
                                      + base_offset
                                      - regno_offset,
                                      GET_MODE (reg));
                      bool success = false;
                      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

                      if (new_src == const0_rtx)
                        /* See above why we create (set (reg) (reg)) here.  */
                        success
                          = validate_change (next, &SET_SRC (set), reg, 0);
                      else
                        {
                          rtx old_src = SET_SRC (set);
                          struct full_rtx_costs oldcst, newcst;
                          rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

                          get_full_set_rtx_cost (set, &oldcst);
                          SET_SRC (set) = tem;
                          get_full_set_src_cost (tem, GET_MODE (reg), &newcst);
                          SET_SRC (set) = old_src;
                          costs_add_n_insns (&oldcst, 1);

                          if (costs_lt_p (&newcst, &oldcst, speed)
                              && have_add2_insn (reg, new_src))
                            {
                              rtx newpat = gen_rtx_SET (reg, tem);
                              success
                                = validate_change (next, &PATTERN (next),
                                                   newpat, 0);
                            }
                        }
                      if (success)
                        delete_insn (insn);
                      changed |= success;
                      insn = next;
                      move2add_record_mode (reg);
                      reg_offset[regno]
                        = trunc_int_for_mode (added_offset + base_offset,
                                              GET_MODE (reg));
                      continue;
                    }
                }
            }

          /* Try to transform
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
             to
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
          if ((GET_CODE (src) == SYMBOL_REF
               || (GET_CODE (src) == CONST
                   && GET_CODE (XEXP (src, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
                   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
              && dbg_cnt (cse2_move2add))
            {
              rtx sym, off;

              if (GET_CODE (src) == SYMBOL_REF)
                {
                  sym = src;
                  off = const0_rtx;
                }
              else
                {
                  sym = XEXP (XEXP (src, 0), 0);
                  off = XEXP (XEXP (src, 0), 1);
                }

              /* If the reg already contains the value which is sum of
                 sym and some constant value, we can use an add2 insn.  */
              if (move2add_valid_value_p (regno, GET_MODE (reg))
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] != NULL_RTX
                  && rtx_equal_p (sym, reg_symbol_ref[regno]))
                changed |= move2add_use_add2_insn (reg, sym, off, insn);

              /* Otherwise, we have to find a register whose value is sum
                 of sym and some constant value.  */
              else
                changed |= move2add_use_add3_insn (reg, sym, off, insn);

              continue;
            }
        }

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC
              && REG_P (XEXP (note, 0)))
            {
              /* Reset the information about this register.  */
              int regno = REGNO (XEXP (note, 0));
              if (regno < FIRST_PSEUDO_REGISTER)
                {
                  move2add_record_mode (XEXP (note, 0));
                  reg_mode[regno] = VOIDmode;
                }
            }
        }
      note_stores (PATTERN (insn), move2add_note_store, insn);

      /* If INSN is a conditional branch, we try to extract an
         implicit set out of it.  */
      if (any_condjump_p (insn))
        {
          rtx cnd = fis_get_condition (insn);

          if (cnd != NULL_RTX
              && GET_CODE (cnd) == NE
              && REG_P (XEXP (cnd, 0))
              && !reg_set_p (XEXP (cnd, 0), insn)
              /* The following two checks, which are also in
                 move2add_note_store, are intended to reduce the
                 number of calls to gen_rtx_SET to avoid memory
                 allocation if possible.  */
              && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
              && REG_NREGS (XEXP (cnd, 0)) == 1
              && CONST_INT_P (XEXP (cnd, 1)))
            {
              rtx implicit_set =
                gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
              move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
            }
        }

      /* If this is a CALL_INSN, all call used registers are stored with
         unknown values.  */
      if (CALL_P (insn))
        {
          rtx link;

          for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
            {
              if (call_used_regs[i])
                /* Reset the information about this register.  */
                reg_mode[i] = VOIDmode;
            }

          for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
               link = XEXP (link, 1))
            {
              rtx setuse = XEXP (link, 0);
              rtx usage_rtx = XEXP (setuse, 0);
              if (GET_CODE (setuse) == CLOBBER
                  && REG_P (usage_rtx))
                {
                  unsigned int end_regno = END_REGNO (usage_rtx);
                  for (unsigned int r = REGNO (usage_rtx); r < end_regno; ++r)
                    /* Reset the information about this register.  */
                    reg_mode[r] = VOIDmode;
                }
            }
        }
    }
  return changed;
}
/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */

static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx_insn *insn = (rtx_insn *) data;
  unsigned int regno = 0;
  machine_mode mode = GET_MODE (dst);

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
        reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
      return;
    }

  if (GET_CODE (dst) == SUBREG)
    regno = subreg_regno (dst);
  else if (REG_P (dst))
    regno = REGNO (dst);
  else
    return;

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      rtx off;

      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
        {
          sym = XEXP (note, 0);
          off = const0_rtx;
        }
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
               && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
               && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
               && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
        {
          sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
          off = XEXP (XEXP (XEXP (note, 0), 0), 1);
        }

      if (sym != NULL_RTX)
        {
          move2add_record_sym_value (dst, sym, off);
          return;
        }
    }

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      unsigned HOST_WIDE_INT offset;
      int base_regno;

      switch (GET_CODE (src))
        {
        case PLUS:
          if (REG_P (XEXP (src, 0)))
            {
              base_reg = XEXP (src, 0);

              if (CONST_INT_P (XEXP (src, 1)))
                offset = UINTVAL (XEXP (src, 1));
              else if (REG_P (XEXP (src, 1))
                       && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
                {
                  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
                      && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
                    offset = reg_offset[REGNO (XEXP (src, 1))];
                  /* Maybe the first register is known to be a
                     constant.  */
                  else if (move2add_valid_value_p (REGNO (base_reg), mode)
                           && reg_base_reg[REGNO (base_reg)] < 0
                           && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
                    {
                      offset = reg_offset[REGNO (base_reg)];
                      base_reg = XEXP (src, 1);
                    }
                  else
                    goto invalidate;
                }
              else
                goto invalidate;

              break;
            }

          goto invalidate;

        case REG:
          base_reg = src;
          offset = 0;
          break;

        case CONST_INT:
          /* Start tracking the register as a constant.  */
          reg_base_reg[regno] = -1;
          reg_symbol_ref[regno] = NULL_RTX;
          reg_offset[regno] = INTVAL (SET_SRC (set));
          /* We assign the same luid to all registers set to constants.  */
          reg_set_luid[regno] = move2add_last_label_luid + 1;
          move2add_record_mode (dst);
          return;

        default:
          goto invalidate;
        }

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
         up as a new base register, pretending its value is known
         starting from the current insn.  */
      if (!move2add_valid_value_p (base_regno, mode))
        {
          reg_base_reg[base_regno] = base_regno;
          reg_symbol_ref[base_regno] = NULL_RTX;
          reg_offset[base_regno] = 0;
          reg_set_luid[base_regno] = move2add_luid;
          gcc_assert (GET_MODE (base_reg) == mode);
          move2add_record_mode (base_reg);
        }

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno]
        = trunc_int_for_mode (offset + reg_offset[base_regno], mode);

      move2add_record_mode (dst);
    }
  else
    {
    invalidate:
      /* Invalidate the contents of the register.  */
      move2add_record_mode (dst);
      reg_mode[regno] = VOIDmode;
    }
}
namespace {

const pass_data pass_data_postreload_cse =
{
  RTL_PASS, /* type */
  "postreload", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_RELOAD_CSE_REGS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_postreload_cse : public rtl_opt_pass
{
public:
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }

  virtual unsigned int execute (function *);

}; // class pass_postreload_cse

unsigned int
pass_postreload_cse::execute (function *fun)
{
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (fun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_postreload_cse (gcc::context *ctxt)
{
  return new pass_postreload_cse (ctxt);
}