/* Perform simple optimizations to clean up the result of reload.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
static int reload_cse_noop_set_p (rtx);
static bool reload_cse_simplify (rtx_insn *, rtx);
static void reload_cse_regs_1 (void);
static int reload_cse_simplify_set (rtx, rtx_insn *);
static int reload_cse_simplify_operands (rtx_insn *, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx_insn *);
static void move2add_note_store (rtx, const_rtx, void *);
/* Call cse / combine like post-reload optimization phases.
   FIRST is the first instruction.  */
reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
  moves_converted = reload_cse_move2add (first);
  if (flag_expensive_optimizations)
/* See whether a single set SET is a noop.  */
reload_cse_noop_set_p (rtx set)
  if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))

  return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
/* Try to simplify INSN.  Return true if the CFG may have changed.  */
reload_cse_simplify (rtx_insn *insn, rtx testreg)
  rtx body = PATTERN (insn);
  basic_block insn_bb = BLOCK_FOR_INSN (insn);
  unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);

  if (GET_CODE (body) == SET)
      /* Simplify even if we may think it is a no-op.
	 We may think a memory load of a value smaller than WORD_SIZE
	 is redundant because we haven't taken into account possible
	 implicit extension.  reload_cse_simplify_set() will bring
	 this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
	  rtx value = SET_DEST (body);
	      && ! REG_FUNCTION_VALUE_P (value))
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */

	apply_change_group ();
	reload_cse_simplify_operands (insn, testreg);
  else if (GET_CODE (body) == PARALLEL)
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
	 within the body of the asm.  Invalidate those registers now so that
	 we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	      rtx part = XVECEXP (body, 0, i);
	      if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
		cselib_invalidate_rtx (XEXP (part, 0));

      /* If every action in a PARALLEL is a noop, we can delete
	 the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	  rtx part = XVECEXP (body, 0, i);
	  if (GET_CODE (part) == SET)
	      if (! reload_cse_noop_set_p (part))
	      if (REG_P (SET_DEST (part))
		  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
		  value = SET_DEST (part);
	  else if (GET_CODE (part) != CLOBBER)

	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	if (GET_CODE (XVECEXP (body, 0, i)) == SET)
	  count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

	apply_change_group ();
	reload_cse_simplify_operands (insn, testreg);

  return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */

reload_cse_regs_1 (void)
  bool cfg_changed = false;
  rtx testreg = gen_rtx_REG (VOIDmode, -1);

  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
	cfg_changed |= reload_cse_simplify (insn, testreg);
	cselib_process_insn (insn);

  end_alias_analysis ();
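
/* Not part of GCC, and disabled with "#if 0": a minimal standalone sketch of
   the idea documented above, shown outside of cselib for illustration only --
   remember which abstract value each hard register currently holds, and turn
   a second load of an already-available value into a register copy.  All
   names here (note_load, reg_value, N_HARD_REGS) are hypothetical; the
   program can be compiled on its own.  */
#if 0
#include <stdio.h>

#define N_HARD_REGS 16

static int reg_value[N_HARD_REGS];	/* value id held by each reg, 0 = unknown */

/* Record that DEST now holds VALUE_ID; return a register that already
   held it, or -1 if the load was not redundant.  */
static int
note_load (int dest, int value_id)
{
  for (int r = 0; r < N_HARD_REGS; r++)
    if (r != dest && reg_value[r] == value_id)
      {
	reg_value[dest] = value_id;
	return r;			/* the load can become a copy from r */
      }
  reg_value[dest] = value_id;
  return -1;
}

int
main (void)
{
  note_load (0, 42);			/* r0 <- [mem A] */
  int src = note_load (1, 42);		/* r1 <- [mem A] again */
  if (src >= 0)
    printf ("replace the load into r1 with a copy from r%d\n", src);
  return 0;
}
#endif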
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */
reload_cse_simplify_set (rtx set, rtx_insn *insn)
  struct elt_loc_list *l;
#ifdef LOAD_EXTEND_OP
  enum rtx_code extend_op = UNKNOWN;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (side_effects_p (src) || true_regnum (src) >= 0)

  dclass = REGNO_REG_CLASS (dreg);

#ifdef LOAD_EXTEND_OP
  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);

  /* If memory loads are cheaper than register copies, don't change them.  */
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
				   REGNO_REG_CLASS (REGNO (src)), dclass);
    old_cost = set_src_cost (src, speed);

  for (l = val->locs; l; l = l->next)
      rtx this_rtx = l->loc;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
#ifdef LOAD_EXTEND_OP
	  if (extend_op != UNKNOWN)
	      if (!CONST_SCALAR_INT_P (this_rtx))
		  result = wide_int::from (std::make_pair (this_rtx,
					   BITS_PER_WORD, UNSIGNED);
		  result = wide_int::from (std::make_pair (this_rtx,
					   BITS_PER_WORD, SIGNED);
	      this_rtx = immed_wide_int_const (result, word_mode);
	  this_cost = set_src_cost (this_rtx, speed);
      else if (REG_P (this_rtx))
#ifdef LOAD_EXTEND_OP
	  if (extend_op != UNKNOWN)
	      this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
	      this_cost = set_src_cost (this_rtx, speed);
	    this_cost = register_move_cost (GET_MODE (this_rtx),
					    REGNO_REG_CLASS (REGNO (this_rtx)),

      /* If equal costs, prefer registers over anything else.  That
	 tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
	  || (this_cost == old_cost
	      && !REG_P (SET_SRC (set))))
#ifdef LOAD_EXTEND_OP
	  if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
	      && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
					    REGNO_REG_CLASS (REGNO (SET_DEST (set))))
	      rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
	      ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
	      validate_change (insn, &SET_DEST (set), wide_dest, 1);

	  validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
	  old_cost = this_cost, did_change = 1;
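
/* Not part of GCC: a tiny standalone illustration (disabled with "#if 0") of
   the replacement rule used above -- accept a candidate source only if it is
   cheaper than the current one, or equally cheap when the current source is
   not already a register.  The function name and the concrete cost numbers
   are made up for the example.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

/* Return true if a candidate source with cost THIS_COST should replace the
   current source with cost OLD_COST, given whether the current source is
   already a register.  */
static bool
better_source_p (int this_cost, int old_cost, bool old_src_is_reg)
{
  return this_cost < old_cost
	 || (this_cost == old_cost && !old_src_is_reg);
}

int
main (void)
{
  printf ("%d\n", better_source_p (4, 4, false));	/* 1: a register wins the tie */
  printf ("%d\n", better_source_p (4, 4, true));	/* 0: keep the existing register */
  printf ("%d\n", better_source_p (6, 4, false));	/* 0: more expensive, keep */
  return 0;
}
#endif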
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */
reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
391 /* For each operand, all registers that are equivalent to it. */
392 HARD_REG_SET equiv_regs
[MAX_RECOG_OPERANDS
];
394 const char *constraints
[MAX_RECOG_OPERANDS
];
396 /* Vector recording how bad an alternative is. */
397 int *alternative_reject
;
398 /* Vector recording how many registers can be introduced by choosing
400 int *alternative_nregs
;
401 /* Array of vectors recording, for each operand and each alternative,
402 which hard register to substitute, or -1 if the operand should be
404 int *op_alt_regno
[MAX_RECOG_OPERANDS
];
405 /* Array of alternatives, sorted in order of decreasing desirability. */
406 int *alternative_order
;
408 extract_constrain_insn (insn
);
410 if (recog_data
.n_alternatives
== 0 || recog_data
.n_operands
== 0)
413 alternative_reject
= XALLOCAVEC (int, recog_data
.n_alternatives
);
414 alternative_nregs
= XALLOCAVEC (int, recog_data
.n_alternatives
);
415 alternative_order
= XALLOCAVEC (int, recog_data
.n_alternatives
);
416 memset (alternative_reject
, 0, recog_data
.n_alternatives
* sizeof (int));
417 memset (alternative_nregs
, 0, recog_data
.n_alternatives
* sizeof (int));
419 /* For each operand, find out which regs are equivalent. */
420 for (i
= 0; i
< recog_data
.n_operands
; i
++)
423 struct elt_loc_list
*l
;
426 CLEAR_HARD_REG_SET (equiv_regs
[i
]);
428 /* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
429 right, so avoid the problem here. Likewise if we have a constant
430 and the insn pattern doesn't tell us the mode we need. */
431 if (LABEL_P (recog_data
.operand
[i
])
432 || (CONSTANT_P (recog_data
.operand
[i
])
433 && recog_data
.operand_mode
[i
] == VOIDmode
))
436 op
= recog_data
.operand
[i
];
437 #ifdef LOAD_EXTEND_OP
439 && GET_MODE_BITSIZE (GET_MODE (op
)) < BITS_PER_WORD
440 && LOAD_EXTEND_OP (GET_MODE (op
)) != UNKNOWN
)
442 rtx set
= single_set (insn
);
444 /* We might have multiple sets, some of which do implicit
445 extension. Punt on this for now. */
448 /* If the destination is also a MEM or a STRICT_LOW_PART, no
450 Also, if there is an explicit extension, we don't have to
451 worry about an implicit one. */
452 else if (MEM_P (SET_DEST (set
))
453 || GET_CODE (SET_DEST (set
)) == STRICT_LOW_PART
454 || GET_CODE (SET_SRC (set
)) == ZERO_EXTEND
455 || GET_CODE (SET_SRC (set
)) == SIGN_EXTEND
)
456 ; /* Continue ordinary processing. */
457 #ifdef CANNOT_CHANGE_MODE_CLASS
458 /* If the register cannot change mode to word_mode, it follows that
459 it cannot have been used in word_mode. */
460 else if (REG_P (SET_DEST (set
))
461 && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set
)),
463 REGNO_REG_CLASS (REGNO (SET_DEST (set
)))))
464 ; /* Continue ordinary processing. */
466 /* If this is a straight load, make the extension explicit. */
467 else if (REG_P (SET_DEST (set
))
468 && recog_data
.n_operands
== 2
469 && SET_SRC (set
) == op
470 && SET_DEST (set
) == recog_data
.operand
[1-i
])
472 validate_change (insn
, recog_data
.operand_loc
[i
],
473 gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op
)),
476 validate_change (insn
, recog_data
.operand_loc
[1-i
],
477 gen_rtx_REG (word_mode
, REGNO (SET_DEST (set
))),
479 if (! apply_change_group ())
481 return reload_cse_simplify_operands (insn
, testreg
);
484 /* ??? There might be arithmetic operations with memory that are
485 safe to optimize, but is it worth the trouble? */
488 #endif /* LOAD_EXTEND_OP */
489 if (side_effects_p (op
))
491 v
= cselib_lookup (op
, recog_data
.operand_mode
[i
], 0, VOIDmode
);
495 for (l
= v
->locs
; l
; l
= l
->next
)
497 SET_HARD_REG_BIT (equiv_regs
[i
], REGNO (l
->loc
));
500 alternative_mask preferred
= get_preferred_alternatives (insn
);
501 for (i
= 0; i
< recog_data
.n_operands
; i
++)
503 enum machine_mode mode
;
507 op_alt_regno
[i
] = XALLOCAVEC (int, recog_data
.n_alternatives
);
508 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
509 op_alt_regno
[i
][j
] = -1;
511 p
= constraints
[i
] = recog_data
.constraints
[i
];
512 mode
= recog_data
.operand_mode
[i
];
514 /* Add the reject values for each alternative given by the constraints
523 alternative_reject
[j
] += 3;
525 alternative_reject
[j
] += 300;
528 /* We won't change operands which are already registers. We
529 also don't want to modify output operands. */
530 regno
= true_regnum (recog_data
.operand
[i
]);
532 || constraints
[i
][0] == '='
533 || constraints
[i
][0] == '+')
536 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
538 enum reg_class rclass
= NO_REGS
;
540 if (! TEST_HARD_REG_BIT (equiv_regs
[i
], regno
))
543 SET_REGNO_RAW (testreg
, regno
);
544 PUT_MODE (testreg
, mode
);
546 /* We found a register equal to this operand. Now look for all
547 alternatives that can accept this register and have not been
548 assigned a register they can use yet. */
558 rclass
= reg_class_subunion
[rclass
][GENERAL_REGS
];
563 = (reg_class_subunion
565 [reg_class_for_constraint (lookup_constraint (p
))]);
569 /* See if REGNO fits this alternative, and set it up as the
570 replacement register if we don't have one for this
571 alternative yet and the operand being replaced is not
572 a cheap CONST_INT. */
573 if (op_alt_regno
[i
][j
] == -1
574 && TEST_BIT (preferred
, j
)
575 && reg_fits_class_p (testreg
, rclass
, 0, mode
)
576 && (!CONST_INT_P (recog_data
.operand
[i
])
577 || (set_src_cost (recog_data
.operand
[i
],
578 optimize_bb_for_speed_p
579 (BLOCK_FOR_INSN (insn
)))
580 > set_src_cost (testreg
,
581 optimize_bb_for_speed_p
582 (BLOCK_FOR_INSN (insn
))))))
584 alternative_nregs
[j
]++;
585 op_alt_regno
[i
][j
] = regno
;
591 p
+= CONSTRAINT_LEN (c
, p
);
  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];

      for (j = i + 1; j < recog_data.n_alternatives; j++)
	  int this_reject = alternative_reject[alternative_order[j]];
	  int this_nregs = alternative_nregs[alternative_order[j]];

	  if (this_reject < best_reject
	      || (this_reject == best_reject && this_nregs > best_nregs))
	      best_reject = this_reject;
	      best_nregs = this_nregs;

      tmp = alternative_order[best];
      alternative_order[best] = alternative_order[i];
      alternative_order[i] = tmp;

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
      enum machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
      validate_change (insn, recog_data.operand_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);

  for (i = recog_data.n_dups - 1; i >= 0; i--)
      int op = recog_data.dup_num[i];
      enum machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
      validate_change (insn, recog_data.dup_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);

  return apply_change_group ();
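
/* Not from GCC: a standalone sketch (disabled with "#if 0") of the ordering
   rule used above -- alternatives with a lower reject count come first, and
   ties are broken in favor of the alternative that would put more operands
   into hard registers.  The function name, arrays and sizes are illustrative
   assumptions only; the program can be built by itself.  */
#if 0
#include <stdio.h>

/* Fill ORDER with alternative indexes sorted by ascending reject count,
   breaking ties by descending nregs, using the same simple selection sort
   as the pass above.  */
static void
order_alternatives (int n, const int *reject, const int *nregs, int *order)
{
  for (int i = 0; i < n; i++)
    order[i] = i;
  for (int i = 0; i < n - 1; i++)
    {
      int best = i;
      for (int j = i + 1; j < n; j++)
	if (reject[order[j]] < reject[order[best]]
	    || (reject[order[j]] == reject[order[best]]
		&& nregs[order[j]] > nregs[order[best]]))
	  best = j;
      int tmp = order[best];
      order[best] = order[i];
      order[i] = tmp;
    }
}

int
main (void)
{
  int reject[] = { 3, 0, 0, 300 };	/* e.g. accumulated from '?' and '!' */
  int nregs[]  = { 1, 0, 2, 3 };
  int order[4];
  order_alternatives (4, reject, nregs, order);
  for (int i = 0; i < 4; i++)
    printf ("%d ", order[i]);		/* prints: 2 1 0 3 */
  printf ("\n");
  return 0;
}
#endif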
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing instead.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */

/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16

/* Describes a recorded use of a register.  */
  /* The insn where a register has been used.  */
  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  /* Location of the register within INSN.  */
  /* The reverse uid of the insn.  */

/* If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.  */
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    bool all_offsets_match;
  } reg_state[FIRST_PSEUDO_REGISTER];

/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;

#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
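
/* Not GCC code: a stripped-down standalone sketch (disabled with "#if 0") of
   the bookkeeping described above -- each register gets a fixed-size table of
   recorded uses, and once the table overflows the register is simply marked
   as used in some unknown fashion so no substitution is attempted.  Names
   and sizes here are illustrative assumptions, not the real reg_state.  */
#if 0
#include <stdio.h>

#define MAX_USES 16			/* mirrors RELOAD_COMBINE_MAX_USES */
#define N_REGS 8

struct use { int insn_uid; };

static struct {
  struct use uses[MAX_USES];
  int use_index;			/* MAX_USES = dead, < 0 = unknown use */
} regs[N_REGS];

/* Record one more use of REGNO in insn INSN_UID; give up (mark the register
   as used in an unknown fashion) once the fixed-size table is full.  */
static void
note_use (int regno, int insn_uid)
{
  int idx = --regs[regno].use_index;
  if (idx < 0)
    return;				/* too many uses: substitution disabled */
  regs[regno].uses[idx].insn_uid = insn_uid;
}

int
main (void)
{
  for (int r = 0; r < N_REGS; r++)
    regs[r].use_index = MAX_USES;	/* "dead" until a use is seen */

  for (int uid = 100; uid > 80; uid--)	/* 20 uses overflow the table */
    note_use (3, uid);

  printf ("reg 3 trackable: %s\n", regs[3].use_index >= 0 ? "yes" : "no");
  return 0;
}
#endif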
729 /* Subroutine of reload_combine_split_ruids, called to fix up a single
730 ruid pointed to by *PRUID if it is higher than SPLIT_RUID. */
733 reload_combine_split_one_ruid (int *pruid
, int split_ruid
)
735 if (*pruid
> split_ruid
)
739 /* Called when we insert a new insn in a position we've already passed in
740 the scan. Examine all our state, increasing all ruids that are higher
741 than SPLIT_RUID by one in order to make room for a new insn. */
744 reload_combine_split_ruids (int split_ruid
)
748 reload_combine_split_one_ruid (&reload_combine_ruid
, split_ruid
);
749 reload_combine_split_one_ruid (&last_label_ruid
, split_ruid
);
750 reload_combine_split_one_ruid (&last_jump_ruid
, split_ruid
);
752 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
754 int j
, idx
= reg_state
[i
].use_index
;
755 reload_combine_split_one_ruid (®_state
[i
].use_ruid
, split_ruid
);
756 reload_combine_split_one_ruid (®_state
[i
].store_ruid
, split_ruid
);
757 reload_combine_split_one_ruid (®_state
[i
].real_store_ruid
,
761 for (j
= idx
; j
< RELOAD_COMBINE_MAX_USES
; j
++)
763 reload_combine_split_one_ruid (®_state
[i
].reg_use
[j
].ruid
,
769 /* Called when we are about to rescan a previously encountered insn with
770 reload_combine_note_use after modifying some part of it. This clears all
771 information about uses in that particular insn. */
774 reload_combine_purge_insn_uses (rtx_insn
*insn
)
778 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
780 int j
, k
, idx
= reg_state
[i
].use_index
;
783 j
= k
= RELOAD_COMBINE_MAX_USES
;
786 if (reg_state
[i
].reg_use
[j
].insn
!= insn
)
790 reg_state
[i
].reg_use
[k
] = reg_state
[i
].reg_use
[j
];
793 reg_state
[i
].use_index
= k
;
797 /* Called when we need to forget about all uses of REGNO after an insn
798 which is identified by RUID. */
801 reload_combine_purge_reg_uses_after_ruid (unsigned regno
, int ruid
)
803 int j
, k
, idx
= reg_state
[regno
].use_index
;
806 j
= k
= RELOAD_COMBINE_MAX_USES
;
809 if (reg_state
[regno
].reg_use
[j
].ruid
>= ruid
)
813 reg_state
[regno
].reg_use
[k
] = reg_state
[regno
].reg_use
[j
];
816 reg_state
[regno
].use_index
= k
;
/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */
823 static struct reg_use
*
824 reload_combine_closest_single_use (unsigned regno
, int ruid_limit
)
826 int i
, best_ruid
= 0;
827 int use_idx
= reg_state
[regno
].use_index
;
828 struct reg_use
*retval
;
833 for (i
= use_idx
; i
< RELOAD_COMBINE_MAX_USES
; i
++)
835 struct reg_use
*use
= reg_state
[regno
].reg_use
+ i
;
836 int this_ruid
= use
->ruid
;
837 if (this_ruid
>= ruid_limit
)
839 if (this_ruid
> best_ruid
)
841 best_ruid
= this_ruid
;
844 else if (this_ruid
== best_ruid
)
847 if (last_label_ruid
>= best_ruid
)
/* After we've moved an add insn, fix up any debug insns that occur
   between the old location of the add and the new location.  REG is
   the destination register of the add insn; REPLACEMENT is the
   SET_SRC of the add.  FROM and TO specify the range in which we
   should make this change on debug insns.  */
859 fixup_debug_insns (rtx reg
, rtx replacement
, rtx_insn
*from
, rtx_insn
*to
)
862 for (insn
= from
; insn
!= to
; insn
= NEXT_INSN (insn
))
866 if (!DEBUG_INSN_P (insn
))
869 t
= INSN_VAR_LOCATION_LOC (insn
);
870 t
= simplify_replace_rtx (t
, reg
, replacement
);
871 validate_change (insn
, &INSN_VAR_LOCATION_LOC (insn
), t
, 0);
/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */
880 try_replace_in_use (struct reg_use
*use
, rtx reg
, rtx src
)
882 rtx_insn
*use_insn
= use
->insn
;
883 rtx mem
= use
->containing_mem
;
884 bool speed
= optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn
));
888 addr_space_t as
= MEM_ADDR_SPACE (mem
);
889 rtx oldaddr
= XEXP (mem
, 0);
890 rtx newaddr
= NULL_RTX
;
891 int old_cost
= address_cost (oldaddr
, GET_MODE (mem
), as
, speed
);
894 newaddr
= simplify_replace_rtx (oldaddr
, reg
, src
);
895 if (memory_address_addr_space_p (GET_MODE (mem
), newaddr
, as
))
897 XEXP (mem
, 0) = newaddr
;
898 new_cost
= address_cost (newaddr
, GET_MODE (mem
), as
, speed
);
899 XEXP (mem
, 0) = oldaddr
;
900 if (new_cost
<= old_cost
901 && validate_change (use_insn
,
902 &XEXP (mem
, 0), newaddr
, 0))
908 rtx new_set
= single_set (use_insn
);
910 && REG_P (SET_DEST (new_set
))
911 && GET_CODE (SET_SRC (new_set
)) == PLUS
912 && REG_P (XEXP (SET_SRC (new_set
), 0))
913 && CONSTANT_P (XEXP (SET_SRC (new_set
), 1)))
916 int old_cost
= set_src_cost (SET_SRC (new_set
), speed
);
918 gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set
), 0), reg
));
919 new_src
= simplify_replace_rtx (SET_SRC (new_set
), reg
, src
);
921 if (set_src_cost (new_src
, speed
) <= old_cost
922 && validate_change (use_insn
, &SET_SRC (new_set
),
/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */
937 reload_combine_recognize_const_pattern (rtx_insn
*insn
)
939 int from_ruid
= reload_combine_ruid
;
940 rtx set
, pat
, reg
, src
, addreg
;
944 rtx_insn
*add_moved_after_insn
= NULL
;
945 int add_moved_after_ruid
= 0;
946 int clobbered_regno
= -1;
948 set
= single_set (insn
);
952 reg
= SET_DEST (set
);
955 || hard_regno_nregs
[REGNO (reg
)][GET_MODE (reg
)] != 1
956 || GET_MODE (reg
) != Pmode
957 || reg
== stack_pointer_rtx
)
962 /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
963 uses of REG1 inside an address, or inside another add insn. If
964 possible and profitable, merge the addition into subsequent
966 if (GET_CODE (src
) != PLUS
967 || !REG_P (XEXP (src
, 0))
968 || !CONSTANT_P (XEXP (src
, 1)))
971 addreg
= XEXP (src
, 0);
972 must_move_add
= rtx_equal_p (reg
, addreg
);
974 pat
= PATTERN (insn
);
975 if (must_move_add
&& set
!= pat
)
977 /* We have to be careful when moving the add; apart from the
978 single_set there may also be clobbers. Recognize one special
979 case, that of one clobber alongside the set (likely a clobber
980 of the CC register). */
981 gcc_assert (GET_CODE (PATTERN (insn
)) == PARALLEL
);
982 if (XVECLEN (pat
, 0) != 2 || XVECEXP (pat
, 0, 0) != set
983 || GET_CODE (XVECEXP (pat
, 0, 1)) != CLOBBER
984 || !REG_P (XEXP (XVECEXP (pat
, 0, 1), 0)))
986 clobbered_regno
= REGNO (XEXP (XVECEXP (pat
, 0, 1), 0));
991 use
= reload_combine_closest_single_use (regno
, from_ruid
);
994 /* Start the search for the next use from here. */
995 from_ruid
= use
->ruid
;
997 if (use
&& GET_MODE (*use
->usep
) == Pmode
)
999 bool delete_add
= false;
1000 rtx_insn
*use_insn
= use
->insn
;
1001 int use_ruid
= use
->ruid
;
1003 /* Avoid moving the add insn past a jump. */
1004 if (must_move_add
&& use_ruid
<= last_jump_ruid
)
1007 /* If the add clobbers another hard reg in parallel, don't move
1008 it past a real set of this hard reg. */
1009 if (must_move_add
&& clobbered_regno
>= 0
1010 && reg_state
[clobbered_regno
].real_store_ruid
>= use_ruid
)
1014 /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets. */
1015 if (must_move_add
&& sets_cc0_p (PATTERN (use_insn
)))
1019 gcc_assert (reg_state
[regno
].store_ruid
<= use_ruid
);
1020 /* Avoid moving a use of ADDREG past a point where it is stored. */
1021 if (reg_state
[REGNO (addreg
)].store_ruid
> use_ruid
)
1024 /* We also must not move the addition past an insn that sets
1025 the same register, unless we can combine two add insns. */
1026 if (must_move_add
&& reg_state
[regno
].store_ruid
== use_ruid
)
1028 if (use
->containing_mem
== NULL_RTX
)
1034 if (try_replace_in_use (use
, reg
, src
))
1036 reload_combine_purge_insn_uses (use_insn
);
1037 reload_combine_note_use (&PATTERN (use_insn
), use_insn
,
1038 use_ruid
, NULL_RTX
);
1042 fixup_debug_insns (reg
, src
, insn
, use_insn
);
1048 add_moved_after_insn
= use_insn
;
1049 add_moved_after_ruid
= use_ruid
;
1054 /* If we get here, we couldn't handle this use. */
1060 if (!must_move_add
|| add_moved_after_insn
== NULL_RTX
)
1061 /* Process the add normally. */
1064 fixup_debug_insns (reg
, src
, insn
, add_moved_after_insn
);
1066 reorder_insns (insn
, insn
, add_moved_after_insn
);
1067 reload_combine_purge_reg_uses_after_ruid (regno
, add_moved_after_ruid
);
1068 reload_combine_split_ruids (add_moved_after_ruid
- 1);
1069 reload_combine_note_use (&PATTERN (insn
), insn
,
1070 add_moved_after_ruid
, NULL_RTX
);
1071 reg_state
[regno
].store_ruid
= add_moved_after_ruid
;
/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */
1081 reload_combine_recognize_pattern (rtx_insn
*insn
)
1086 set
= single_set (insn
);
1087 if (set
== NULL_RTX
)
1090 reg
= SET_DEST (set
);
1091 src
= SET_SRC (set
);
1093 || hard_regno_nregs
[REGNO (reg
)][GET_MODE (reg
)] != 1)
1096 regno
= REGNO (reg
);
1098 /* Look for (set (REGX) (CONST_INT))
1099 (set (REGX) (PLUS (REGX) (REGY)))
1101 ... (MEM (REGX)) ...
1103 (set (REGZ) (CONST_INT))
1105 ... (MEM (PLUS (REGZ) (REGY)))... .
1107 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
1108 and that we know all uses of REGX before it dies.
1109 Also, explicitly check that REGX != REGY; our life information
1110 does not yet show whether REGY changes in this insn. */
1112 if (GET_CODE (src
) == PLUS
1113 && reg_state
[regno
].all_offsets_match
1114 && last_index_reg
!= -1
1115 && REG_P (XEXP (src
, 1))
1116 && rtx_equal_p (XEXP (src
, 0), reg
)
1117 && !rtx_equal_p (XEXP (src
, 1), reg
)
1118 && reg_state
[regno
].use_index
>= 0
1119 && reg_state
[regno
].use_index
< RELOAD_COMBINE_MAX_USES
1120 && last_label_ruid
< reg_state
[regno
].use_ruid
)
1122 rtx base
= XEXP (src
, 1);
1123 rtx_insn
*prev
= prev_nonnote_nondebug_insn (insn
);
1124 rtx prev_set
= prev
? single_set (prev
) : NULL_RTX
;
1125 rtx index_reg
= NULL_RTX
;
1126 rtx reg_sum
= NULL_RTX
;
1129 /* Now we need to set INDEX_REG to an index register (denoted as
1130 REGZ in the illustration above) and REG_SUM to the expression
1131 register+register that we want to use to substitute uses of REG
1132 (typically in MEMs) with. First check REG and BASE for being
1133 index registers; we can use them even if they are not dead. */
1134 if (TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
], regno
)
1135 || TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
],
1143 /* Otherwise, look for a free index register. Since we have
1144 checked above that neither REG nor BASE are index registers,
1145 if we find anything at all, it will be different from these
1147 for (i
= first_index_reg
; i
<= last_index_reg
; i
++)
1149 if (TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
], i
)
1150 && reg_state
[i
].use_index
== RELOAD_COMBINE_MAX_USES
1151 && reg_state
[i
].store_ruid
<= reg_state
[regno
].use_ruid
1152 && (call_used_regs
[i
] || df_regs_ever_live_p (i
))
1153 && (!frame_pointer_needed
|| i
!= HARD_FRAME_POINTER_REGNUM
)
1154 && !fixed_regs
[i
] && !global_regs
[i
]
1155 && hard_regno_nregs
[i
][GET_MODE (reg
)] == 1
1156 && targetm
.hard_regno_scratch_ok (i
))
1158 index_reg
= gen_rtx_REG (GET_MODE (reg
), i
);
1159 reg_sum
= gen_rtx_PLUS (GET_MODE (reg
), index_reg
, base
);
1165 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
1166 (REGY), i.e. BASE, is not clobbered before the last use we'll
1170 && CONST_INT_P (SET_SRC (prev_set
))
1171 && rtx_equal_p (SET_DEST (prev_set
), reg
)
1172 && (reg_state
[REGNO (base
)].store_ruid
1173 <= reg_state
[regno
].use_ruid
))
1175 /* Change destination register and, if necessary, the constant
1176 value in PREV, the constant loading instruction. */
1177 validate_change (prev
, &SET_DEST (prev_set
), index_reg
, 1);
1178 if (reg_state
[regno
].offset
!= const0_rtx
)
1179 validate_change (prev
,
1180 &SET_SRC (prev_set
),
1181 GEN_INT (INTVAL (SET_SRC (prev_set
))
1182 + INTVAL (reg_state
[regno
].offset
)),
1185 /* Now for every use of REG that we have recorded, replace REG
1187 for (i
= reg_state
[regno
].use_index
;
1188 i
< RELOAD_COMBINE_MAX_USES
; i
++)
1189 validate_unshare_change (reg_state
[regno
].reg_use
[i
].insn
,
1190 reg_state
[regno
].reg_use
[i
].usep
,
1191 /* Each change must have its own
1195 if (apply_change_group ())
1197 struct reg_use
*lowest_ruid
= NULL
;
1199 /* For every new use of REG_SUM, we have to record the use
1200 of BASE therein, i.e. operand 1. */
1201 for (i
= reg_state
[regno
].use_index
;
1202 i
< RELOAD_COMBINE_MAX_USES
; i
++)
1204 struct reg_use
*use
= reg_state
[regno
].reg_use
+ i
;
1205 reload_combine_note_use (&XEXP (*use
->usep
, 1), use
->insn
,
1206 use
->ruid
, use
->containing_mem
);
1207 if (lowest_ruid
== NULL
|| use
->ruid
< lowest_ruid
->ruid
)
1211 fixup_debug_insns (reg
, reg_sum
, insn
, lowest_ruid
->insn
);
1213 /* Delete the reg-reg addition. */
1216 if (reg_state
[regno
].offset
!= const0_rtx
)
1217 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
1219 remove_reg_equal_equiv_notes (prev
);
1221 reg_state
[regno
].use_index
= RELOAD_COMBINE_MAX_USES
;
1230 reload_combine (void)
1232 rtx_insn
*insn
, *prev
;
1235 int min_labelno
, n_labels
;
1236 HARD_REG_SET ever_live_at_start
, *label_live
;
1238 /* To avoid wasting too much time later searching for an index register,
1239 determine the minimum and maximum index register numbers. */
1240 if (INDEX_REG_CLASS
== NO_REGS
)
1241 last_index_reg
= -1;
1242 else if (first_index_reg
== -1 && last_index_reg
== 0)
1244 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
1245 if (TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
], r
))
1247 if (first_index_reg
== -1)
1248 first_index_reg
= r
;
1253 /* If no index register is available, we can quit now. Set LAST_INDEX_REG
1254 to -1 so we'll know to quit early the next time we get here. */
1255 if (first_index_reg
== -1)
1257 last_index_reg
= -1;
1262 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
1263 information is a bit fuzzy immediately after reload, but it's
1264 still good enough to determine which registers are live at a jump
1266 min_labelno
= get_first_label_num ();
1267 n_labels
= max_label_num () - min_labelno
;
1268 label_live
= XNEWVEC (HARD_REG_SET
, n_labels
);
1269 CLEAR_HARD_REG_SET (ever_live_at_start
);
1271 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
1273 insn
= BB_HEAD (bb
);
1277 bitmap live_in
= df_get_live_in (bb
);
1279 REG_SET_TO_HARD_REG_SET (live
, live_in
);
1280 compute_use_by_pseudos (&live
, live_in
);
1281 COPY_HARD_REG_SET (LABEL_LIVE (insn
), live
);
1282 IOR_HARD_REG_SET (ever_live_at_start
, live
);
1286 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
1287 last_label_ruid
= last_jump_ruid
= reload_combine_ruid
= 0;
1288 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
1290 reg_state
[r
].store_ruid
= 0;
1291 reg_state
[r
].real_store_ruid
= 0;
1293 reg_state
[r
].use_index
= -1;
1295 reg_state
[r
].use_index
= RELOAD_COMBINE_MAX_USES
;
1298 for (insn
= get_last_insn (); insn
; insn
= prev
)
1300 bool control_flow_insn
;
1303 prev
= PREV_INSN (insn
);
1305 /* We cannot do our optimization across labels. Invalidating all the use
1306 information we have would be costly, so we just note where the label
1307 is and then later disable any optimization that would cross it. */
1309 last_label_ruid
= reload_combine_ruid
;
1310 else if (BARRIER_P (insn
))
1312 /* Crossing a barrier resets all the use information. */
1313 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
1314 if (! fixed_regs
[r
])
1315 reg_state
[r
].use_index
= RELOAD_COMBINE_MAX_USES
;
1317 else if (INSN_P (insn
) && volatile_insn_p (PATTERN (insn
)))
1318 /* Optimizations across insns being marked as volatile must be
1319 prevented. All the usage information is invalidated
1321 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
1323 && reg_state
[r
].use_index
!= RELOAD_COMBINE_MAX_USES
)
1324 reg_state
[r
].use_index
= -1;
1326 if (! NONDEBUG_INSN_P (insn
))
1329 reload_combine_ruid
++;
1331 control_flow_insn
= control_flow_insn_p (insn
);
1332 if (control_flow_insn
)
1333 last_jump_ruid
= reload_combine_ruid
;
1335 if (reload_combine_recognize_const_pattern (insn
)
1336 || reload_combine_recognize_pattern (insn
))
1339 note_stores (PATTERN (insn
), reload_combine_note_store
, NULL
);
1345 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
1346 if (call_used_regs
[r
])
1348 reg_state
[r
].use_index
= RELOAD_COMBINE_MAX_USES
;
1349 reg_state
[r
].store_ruid
= reload_combine_ruid
;
1352 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
;
1353 link
= XEXP (link
, 1))
1355 rtx setuse
= XEXP (link
, 0);
1356 rtx usage_rtx
= XEXP (setuse
, 0);
1357 if ((GET_CODE (setuse
) == USE
|| GET_CODE (setuse
) == CLOBBER
)
1358 && REG_P (usage_rtx
))
1361 unsigned int start_reg
= REGNO (usage_rtx
);
1362 unsigned int num_regs
1363 = hard_regno_nregs
[start_reg
][GET_MODE (usage_rtx
)];
1364 unsigned int end_reg
= start_reg
+ num_regs
- 1;
1365 for (i
= start_reg
; i
<= end_reg
; i
++)
1366 if (GET_CODE (XEXP (link
, 0)) == CLOBBER
)
1368 reg_state
[i
].use_index
= RELOAD_COMBINE_MAX_USES
;
1369 reg_state
[i
].store_ruid
= reload_combine_ruid
;
1372 reg_state
[i
].use_index
= -1;
1377 if (control_flow_insn
&& !ANY_RETURN_P (PATTERN (insn
)))
1379 /* Non-spill registers might be used at the call destination in
1380 some unknown fashion, so we have to mark the unknown use. */
1383 if ((condjump_p (insn
) || condjump_in_parallel_p (insn
))
1384 && JUMP_LABEL (insn
))
1386 if (ANY_RETURN_P (JUMP_LABEL (insn
)))
1389 live
= &LABEL_LIVE (JUMP_LABEL (insn
));
1392 live
= &ever_live_at_start
;
1395 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
1396 if (TEST_HARD_REG_BIT (*live
, r
))
1397 reg_state
[r
].use_index
= -1;
1400 reload_combine_note_use (&PATTERN (insn
), insn
, reload_combine_ruid
,
1403 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1405 if (REG_NOTE_KIND (note
) == REG_INC
&& REG_P (XEXP (note
, 0)))
1407 int regno
= REGNO (XEXP (note
, 0));
1408 reg_state
[regno
].store_ruid
= reload_combine_ruid
;
1409 reg_state
[regno
].real_store_ruid
= reload_combine_ruid
;
1410 reg_state
[regno
].use_index
= -1;
/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */
1423 reload_combine_note_store (rtx dst
, const_rtx set
, void *data ATTRIBUTE_UNUSED
)
1427 enum machine_mode mode
= GET_MODE (dst
);
1429 if (GET_CODE (dst
) == SUBREG
)
1431 regno
= subreg_regno_offset (REGNO (SUBREG_REG (dst
)),
1432 GET_MODE (SUBREG_REG (dst
)),
1435 dst
= SUBREG_REG (dst
);
1438 /* Some targets do argument pushes without adding REG_INC notes. */
1442 dst
= XEXP (dst
, 0);
1443 if (GET_CODE (dst
) == PRE_INC
|| GET_CODE (dst
) == POST_INC
1444 || GET_CODE (dst
) == PRE_DEC
|| GET_CODE (dst
) == POST_DEC
1445 || GET_CODE (dst
) == PRE_MODIFY
|| GET_CODE (dst
) == POST_MODIFY
)
1447 regno
= REGNO (XEXP (dst
, 0));
1448 mode
= GET_MODE (XEXP (dst
, 0));
1449 for (i
= hard_regno_nregs
[regno
][mode
] - 1 + regno
; i
>= regno
; i
--)
1451 /* We could probably do better, but for now mark the register
1452 as used in an unknown fashion and set/clobbered at this
1454 reg_state
[i
].use_index
= -1;
1455 reg_state
[i
].store_ruid
= reload_combine_ruid
;
1456 reg_state
[i
].real_store_ruid
= reload_combine_ruid
;
1465 regno
+= REGNO (dst
);
1467 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
1468 careful with registers / register parts that are not full words.
1469 Similarly for ZERO_EXTRACT. */
1470 if (GET_CODE (SET_DEST (set
)) == ZERO_EXTRACT
1471 || GET_CODE (SET_DEST (set
)) == STRICT_LOW_PART
)
1473 for (i
= hard_regno_nregs
[regno
][mode
] - 1 + regno
; i
>= regno
; i
--)
1475 reg_state
[i
].use_index
= -1;
1476 reg_state
[i
].store_ruid
= reload_combine_ruid
;
1477 reg_state
[i
].real_store_ruid
= reload_combine_ruid
;
1482 for (i
= hard_regno_nregs
[regno
][mode
] - 1 + regno
; i
>= regno
; i
--)
1484 reg_state
[i
].store_ruid
= reload_combine_ruid
;
1485 if (GET_CODE (set
) == SET
)
1486 reg_state
[i
].real_store_ruid
= reload_combine_ruid
;
1487 reg_state
[i
].use_index
= RELOAD_COMBINE_MAX_USES
;
/* XP points to a piece of rtl that has to be checked for any uses of
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */
1497 reload_combine_note_use (rtx
*xp
, rtx_insn
*insn
, int ruid
, rtx containing_mem
)
1500 enum rtx_code code
= x
->code
;
1503 rtx offset
= const0_rtx
; /* For the REG case below. */
1508 if (REG_P (SET_DEST (x
)))
1510 reload_combine_note_use (&SET_SRC (x
), insn
, ruid
, NULL_RTX
);
1516 /* If this is the USE of a return value, we can't change it. */
1517 if (REG_P (XEXP (x
, 0)) && REG_FUNCTION_VALUE_P (XEXP (x
, 0)))
1519 /* Mark the return register as used in an unknown fashion. */
1520 rtx reg
= XEXP (x
, 0);
1521 int regno
= REGNO (reg
);
1522 int nregs
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
1524 while (--nregs
>= 0)
1525 reg_state
[regno
+ nregs
].use_index
= -1;
1531 if (REG_P (SET_DEST (x
)))
1533 /* No spurious CLOBBERs of pseudo registers may remain. */
1534 gcc_assert (REGNO (SET_DEST (x
)) < FIRST_PSEUDO_REGISTER
);
1540 /* We are interested in (plus (reg) (const_int)) . */
1541 if (!REG_P (XEXP (x
, 0))
1542 || !CONST_INT_P (XEXP (x
, 1)))
1544 offset
= XEXP (x
, 1);
1549 int regno
= REGNO (x
);
1553 /* No spurious USEs of pseudo registers may remain. */
1554 gcc_assert (regno
< FIRST_PSEUDO_REGISTER
);
1556 nregs
= hard_regno_nregs
[regno
][GET_MODE (x
)];
1558 /* We can't substitute into multi-hard-reg uses. */
1561 while (--nregs
>= 0)
1562 reg_state
[regno
+ nregs
].use_index
= -1;
1566 /* We may be called to update uses in previously seen insns.
1567 Don't add uses beyond the last store we saw. */
1568 if (ruid
< reg_state
[regno
].store_ruid
)
1571 /* If this register is already used in some unknown fashion, we
1573 If we decrement the index from zero to -1, we can't store more
1574 uses, so this register becomes used in an unknown fashion. */
1575 use_index
= --reg_state
[regno
].use_index
;
1579 if (use_index
== RELOAD_COMBINE_MAX_USES
- 1)
1581 /* This is the first use of this register we have seen since we
1582 marked it as dead. */
1583 reg_state
[regno
].offset
= offset
;
1584 reg_state
[regno
].all_offsets_match
= true;
1585 reg_state
[regno
].use_ruid
= ruid
;
1589 if (reg_state
[regno
].use_ruid
> ruid
)
1590 reg_state
[regno
].use_ruid
= ruid
;
1592 if (! rtx_equal_p (offset
, reg_state
[regno
].offset
))
1593 reg_state
[regno
].all_offsets_match
= false;
1596 reg_state
[regno
].reg_use
[use_index
].insn
= insn
;
1597 reg_state
[regno
].reg_use
[use_index
].ruid
= ruid
;
1598 reg_state
[regno
].reg_use
[use_index
].containing_mem
= containing_mem
;
1599 reg_state
[regno
].reg_use
[use_index
].usep
= xp
;
1611 /* Recursively process the components of X. */
1612 fmt
= GET_RTX_FORMAT (code
);
1613 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1616 reload_combine_note_use (&XEXP (x
, i
), insn
, ruid
, containing_mem
);
1617 else if (fmt
[i
] == 'E')
1619 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1620 reload_combine_note_use (&XVECEXP (x
, i
, j
), insn
, ruid
,
/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.
   For a set that established a new (potential) base register with
   non-constant value, we use move2add_luid from the place where the
   setting insn is encountered; registers based off that base then
   get the same reg_set_luid.  Constants all get
   move2add_last_label_luid + 1 as their reg_set_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n].
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
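
/* Not part of GCC: a minimal standalone sketch (disabled with "#if 0") of the
   move2add transformation tracked with the state above -- when a register is
   known to hold constant A and is then re-loaded with constant B, the load
   can become an add of (B - A) if that is judged cheaper.  The cost function
   here is a made-up stand-in for GCC's rtx cost machinery.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

/* Stand-in cost model: pretend small immediates are cheap (they fit an
   add-immediate) and large ones need an expensive constant load.  */
static int
const_cost (long v)
{
  return (v >= -128 && v < 128) ? 1 : 3;
}

/* Decide whether "reg = B" should become "reg = reg + (B - A)" when REG is
   already known to contain A; return the addend through *ADDEND.  */
static bool
use_add_instead_of_move (long known_a, long new_b, long *addend)
{
  *addend = new_b - known_a;
  return const_cost (*addend) < const_cost (new_b);
}

int
main (void)
{
  long addend;
  if (use_add_instead_of_move (0x12340000L, 0x12340010L, &addend))
    printf ("emit: reg = reg + %ld\n", addend);	/* reg = reg + 16 */
  else
    printf ("keep the plain constant load\n");
  return 0;
}
#endif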
1672 /* Record that REG is being set to a value with the mode of REG. */
1675 move2add_record_mode (rtx reg
)
1678 enum machine_mode mode
= GET_MODE (reg
);
1680 if (GET_CODE (reg
) == SUBREG
)
1682 regno
= subreg_regno (reg
);
1683 nregs
= subreg_nregs (reg
);
1685 else if (REG_P (reg
))
1687 regno
= REGNO (reg
);
1688 nregs
= hard_regno_nregs
[regno
][mode
];
1692 for (int i
= nregs
- 1; i
> 0; i
--)
1693 reg_mode
[regno
+ i
] = BLKmode
;
1694 reg_mode
[regno
] = mode
;
1697 /* Record that REG is being set to the sum of SYM and OFF. */
1700 move2add_record_sym_value (rtx reg
, rtx sym
, rtx off
)
1702 int regno
= REGNO (reg
);
1704 move2add_record_mode (reg
);
1705 reg_set_luid
[regno
] = move2add_luid
;
1706 reg_base_reg
[regno
] = -1;
1707 reg_symbol_ref
[regno
] = sym
;
1708 reg_offset
[regno
] = INTVAL (off
);
1711 /* Check if REGNO contains a valid value in MODE. */
1714 move2add_valid_value_p (int regno
, enum machine_mode mode
)
1716 if (reg_set_luid
[regno
] <= move2add_last_label_luid
)
1719 if (mode
!= reg_mode
[regno
])
1721 if (!MODES_OK_FOR_MOVE2ADD (mode
, reg_mode
[regno
]))
1723 /* The value loaded into regno in reg_mode[regno] is also valid in
1724 mode after truncation only if (REG:mode regno) is the lowpart of
1725 (REG:reg_mode[regno] regno). Now, for big endian, the starting
1726 regno of the lowpart might be different. */
1727 int s_off
= subreg_lowpart_offset (mode
, reg_mode
[regno
]);
1728 s_off
= subreg_regno_offset (regno
, reg_mode
[regno
], s_off
, mode
);
1730 /* We could in principle adjust regno, check reg_mode[regno] to be
1731 BLKmode, and return s_off to the caller (vs. -1 for failure),
1732 but we currently have no callers that could make use of this
1737 for (int i
= hard_regno_nregs
[regno
][mode
] - 1; i
> 0; i
--)
1738 if (reg_mode
[regno
+ i
] != BLKmode
)
1743 /* This function is called with INSN that sets REG to (SYM + OFF),
1744 while REG is known to already have value (SYM + offset).
1745 This function tries to change INSN into an add instruction
1746 (set (REG) (plus (REG) (OFF - offset))) using the known value.
1747 It also updates the information about REG's known value.
1748 Return true if we made a change. */
1751 move2add_use_add2_insn (rtx reg
, rtx sym
, rtx off
, rtx_insn
*insn
)
1753 rtx pat
= PATTERN (insn
);
1754 rtx src
= SET_SRC (pat
);
1755 int regno
= REGNO (reg
);
1756 rtx new_src
= gen_int_mode (UINTVAL (off
) - reg_offset
[regno
],
1758 bool speed
= optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn
));
1759 bool changed
= false;
1761 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
1762 use (set (reg) (reg)) instead.
1763 We don't delete this insn, nor do we convert it into a
1764 note, to avoid losing register notes or the return
1765 value flag. jump2 already knows how to get rid of
1767 if (new_src
== const0_rtx
)
1769 /* If the constants are different, this is a
1770 truncation, that, if turned into (set (reg)
1771 (reg)), would be discarded. Maybe we should
1772 try a truncMN pattern? */
1773 if (INTVAL (off
) == reg_offset
[regno
])
1774 changed
= validate_change (insn
, &SET_SRC (pat
), reg
, 0);
1778 struct full_rtx_costs oldcst
, newcst
;
1779 rtx tem
= gen_rtx_PLUS (GET_MODE (reg
), reg
, new_src
);
1781 get_full_set_rtx_cost (pat
, &oldcst
);
1782 SET_SRC (pat
) = tem
;
1783 get_full_set_rtx_cost (pat
, &newcst
);
1784 SET_SRC (pat
) = src
;
1786 if (costs_lt_p (&newcst
, &oldcst
, speed
)
1787 && have_add2_insn (reg
, new_src
))
1788 changed
= validate_change (insn
, &SET_SRC (pat
), tem
, 0);
1789 else if (sym
== NULL_RTX
&& GET_MODE (reg
) != BImode
)
1791 enum machine_mode narrow_mode
;
1792 for (narrow_mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1793 narrow_mode
!= VOIDmode
1794 && narrow_mode
!= GET_MODE (reg
);
1795 narrow_mode
= GET_MODE_WIDER_MODE (narrow_mode
))
1797 if (have_insn_for (STRICT_LOW_PART
, narrow_mode
)
1798 && ((reg_offset
[regno
] & ~GET_MODE_MASK (narrow_mode
))
1799 == (INTVAL (off
) & ~GET_MODE_MASK (narrow_mode
))))
1801 rtx narrow_reg
= gen_lowpart_common (narrow_mode
, reg
);
1802 rtx narrow_src
= gen_int_mode (INTVAL (off
),
1805 = gen_rtx_SET (VOIDmode
,
1806 gen_rtx_STRICT_LOW_PART (VOIDmode
,
1809 get_full_set_rtx_cost (new_set
, &newcst
);
1810 if (costs_lt_p (&newcst
, &oldcst
, speed
))
1812 changed
= validate_change (insn
, &PATTERN (insn
),
1821 move2add_record_sym_value (reg
, sym
, off
);
1826 /* This function is called with INSN that sets REG to (SYM + OFF),
1827 but REG doesn't have known value (SYM + offset). This function
1828 tries to find another register which is known to already have
1829 value (SYM + offset) and change INSN into an add instruction
1830 (set (REG) (plus (the found register) (OFF - offset))) if such
1831 a register is found. It also updates the information about
1833 Return true iff we made a change. */
1836 move2add_use_add3_insn (rtx reg
, rtx sym
, rtx off
, rtx_insn
*insn
)
1838 rtx pat
= PATTERN (insn
);
1839 rtx src
= SET_SRC (pat
);
1840 int regno
= REGNO (reg
);
1842 bool speed
= optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn
));
1844 bool changed
= false;
1845 struct full_rtx_costs oldcst
, newcst
, mincst
;
1848 init_costs_to_max (&mincst
);
1849 get_full_set_rtx_cost (pat
, &oldcst
);
1851 plus_expr
= gen_rtx_PLUS (GET_MODE (reg
), reg
, const0_rtx
);
1852 SET_SRC (pat
) = plus_expr
;
1854 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1855 if (move2add_valid_value_p (i
, GET_MODE (reg
))
1856 && reg_base_reg
[i
] < 0
1857 && reg_symbol_ref
[i
] != NULL_RTX
1858 && rtx_equal_p (sym
, reg_symbol_ref
[i
]))
1860 rtx new_src
= gen_int_mode (UINTVAL (off
) - reg_offset
[i
],
1862 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
1863 use (set (reg) (reg)) instead.
1864 We don't delete this insn, nor do we convert it into a
1865 note, to avoid losing register notes or the return
1866 value flag. jump2 already knows how to get rid of
1868 if (new_src
== const0_rtx
)
1870 init_costs_to_zero (&mincst
);
1876 XEXP (plus_expr
, 1) = new_src
;
1877 get_full_set_rtx_cost (pat
, &newcst
);
1879 if (costs_lt_p (&newcst
, &mincst
, speed
))
1886 SET_SRC (pat
) = src
;
1888 if (costs_lt_p (&mincst
, &oldcst
, speed
))
1892 tem
= gen_rtx_REG (GET_MODE (reg
), min_regno
);
1895 rtx new_src
= gen_int_mode (UINTVAL (off
) - reg_offset
[min_regno
],
1897 tem
= gen_rtx_PLUS (GET_MODE (reg
), tem
, new_src
);
1899 if (validate_change (insn
, &SET_SRC (pat
), tem
, 0))
1902 reg_set_luid
[regno
] = move2add_luid
;
1903 move2add_record_sym_value (reg
, sym
, off
);
/* Convert move insns with constant inputs to additions if they are cheaper.
   Return true if any changes were made.  */
1910 reload_cse_move2add (rtx_insn
*first
)
1914 bool changed
= false;
1916 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; i
--)
1918 reg_set_luid
[i
] = 0;
1920 reg_base_reg
[i
] = 0;
1921 reg_symbol_ref
[i
] = NULL_RTX
;
1922 reg_mode
[i
] = VOIDmode
;
1925 move2add_last_label_luid
= 0;
1927 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
), move2add_luid
++)
1933 move2add_last_label_luid
= move2add_luid
;
1934 /* We're going to increment move2add_luid twice after a
1935 label, so that we can use move2add_last_label_luid + 1 as
1936 the luid for constants. */
1940 if (! INSN_P (insn
))
1942 pat
= PATTERN (insn
);
1943 /* For simplicity, we only perform this optimization on
1944 straightforward SETs. */
1945 if (GET_CODE (pat
) == SET
1946 && REG_P (SET_DEST (pat
)))
1948 rtx reg
= SET_DEST (pat
);
1949 int regno
= REGNO (reg
);
1950 rtx src
= SET_SRC (pat
);
1952 /* Check if we have valid information on the contents of this
1953 register in the mode of REG. */
1954 if (move2add_valid_value_p (regno
, GET_MODE (reg
))
1955 && dbg_cnt (cse2_move2add
))
1957 /* Try to transform (set (REGX) (CONST_INT A))
1959 (set (REGX) (CONST_INT B))
1961 (set (REGX) (CONST_INT A))
1963 (set (REGX) (plus (REGX) (CONST_INT B-A)))
1965 (set (REGX) (CONST_INT A))
1967 (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
1970 if (CONST_INT_P (src
)
1971 && reg_base_reg
[regno
] < 0
1972 && reg_symbol_ref
[regno
] == NULL_RTX
)
1974 changed
|= move2add_use_add2_insn (reg
, NULL_RTX
, src
, insn
);
1978 /* Try to transform (set (REGX) (REGY))
1979 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1982 (set (REGX) (PLUS (REGX) (CONST_INT B)))
1985 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1987 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
1988 else if (REG_P (src
)
1989 && reg_set_luid
[regno
] == reg_set_luid
[REGNO (src
)]
1990 && reg_base_reg
[regno
] == reg_base_reg
[REGNO (src
)]
1991 && move2add_valid_value_p (REGNO (src
), GET_MODE (reg
)))
1993 rtx_insn
*next
= next_nonnote_nondebug_insn (insn
);
1996 set
= single_set (next
);
1998 && SET_DEST (set
) == reg
1999 && GET_CODE (SET_SRC (set
)) == PLUS
2000 && XEXP (SET_SRC (set
), 0) == reg
2001 && CONST_INT_P (XEXP (SET_SRC (set
), 1)))
2003 rtx src3
= XEXP (SET_SRC (set
), 1);
2004 unsigned HOST_WIDE_INT added_offset
= UINTVAL (src3
);
2005 HOST_WIDE_INT base_offset
= reg_offset
[REGNO (src
)];
2006 HOST_WIDE_INT regno_offset
= reg_offset
[regno
];
2008 gen_int_mode (added_offset
2012 bool success
= false;
2013 bool speed
= optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn
));
2015 if (new_src
== const0_rtx
)
2016 /* See above why we create (set (reg) (reg)) here. */
2018 = validate_change (next
, &SET_SRC (set
), reg
, 0);
2021 rtx old_src
= SET_SRC (set
);
2022 struct full_rtx_costs oldcst
, newcst
;
2023 rtx tem
= gen_rtx_PLUS (GET_MODE (reg
), reg
, new_src
);
2025 get_full_set_rtx_cost (set
, &oldcst
);
2026 SET_SRC (set
) = tem
;
2027 get_full_set_src_cost (tem
, &newcst
);
2028 SET_SRC (set
) = old_src
;
2029 costs_add_n_insns (&oldcst
, 1);
2031 if (costs_lt_p (&newcst
, &oldcst
, speed
)
2032 && have_add2_insn (reg
, new_src
))
2034 rtx newpat
= gen_rtx_SET (VOIDmode
, reg
, tem
);
2036 = validate_change (next
, &PATTERN (next
),
2044 move2add_record_mode (reg
);
2046 = trunc_int_for_mode (added_offset
+ base_offset
,
2054 (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
2056 (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
2058 (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
2060 (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A)))) */
2061 if ((GET_CODE (src
) == SYMBOL_REF
2062 || (GET_CODE (src
) == CONST
2063 && GET_CODE (XEXP (src
, 0)) == PLUS
2064 && GET_CODE (XEXP (XEXP (src
, 0), 0)) == SYMBOL_REF
2065 && CONST_INT_P (XEXP (XEXP (src
, 0), 1))))
2066 && dbg_cnt (cse2_move2add
))
2070 if (GET_CODE (src
) == SYMBOL_REF
)
2077 sym
= XEXP (XEXP (src
, 0), 0);
2078 off
= XEXP (XEXP (src
, 0), 1);
2081 /* If the reg already contains the value which is sum of
2082 sym and some constant value, we can use an add2 insn. */
2083 if (move2add_valid_value_p (regno
, GET_MODE (reg
))
2084 && reg_base_reg
[regno
] < 0
2085 && reg_symbol_ref
[regno
] != NULL_RTX
2086 && rtx_equal_p (sym
, reg_symbol_ref
[regno
]))
2087 changed
|= move2add_use_add2_insn (reg
, sym
, off
, insn
);
2089 /* Otherwise, we have to find a register whose value is sum
2090 of sym and some constant value. */
2092 changed
|= move2add_use_add3_insn (reg
, sym
, off
, insn
);
2098 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
2100 if (REG_NOTE_KIND (note
) == REG_INC
2101 && REG_P (XEXP (note
, 0)))
2103 /* Reset the information about this register. */
2104 int regno
= REGNO (XEXP (note
, 0));
2105 if (regno
< FIRST_PSEUDO_REGISTER
)
2107 move2add_record_mode (XEXP (note
, 0));
2108 reg_mode
[regno
] = VOIDmode
;
2112 note_stores (PATTERN (insn
), move2add_note_store
, insn
);
2114 /* If INSN is a conditional branch, we try to extract an
2115 implicit set out of it. */
2116 if (any_condjump_p (insn
))
2118 rtx cnd
= fis_get_condition (insn
);
2121 && GET_CODE (cnd
) == NE
2122 && REG_P (XEXP (cnd
, 0))
2123 && !reg_set_p (XEXP (cnd
, 0), insn
)
2124 /* The following two checks, which are also in
2125 move2add_note_store, are intended to reduce the
2126 number of calls to gen_rtx_SET to avoid memory
2127 allocation if possible. */
2128 && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd
, 0)))
2129 && hard_regno_nregs
[REGNO (XEXP (cnd
, 0))][GET_MODE (XEXP (cnd
, 0))] == 1
2130 && CONST_INT_P (XEXP (cnd
, 1)))
2133 gen_rtx_SET (VOIDmode
, XEXP (cnd
, 0), XEXP (cnd
, 1));
2134 move2add_note_store (SET_DEST (implicit_set
), implicit_set
, insn
);
2138 /* If this is a CALL_INSN, all call used registers are stored with
2142 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; i
--)
2144 if (call_used_regs
[i
])
2145 /* Reset the information about this register. */
2146 reg_mode
[i
] = VOIDmode
;
/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */
2159 move2add_note_store (rtx dst
, const_rtx set
, void *data
)
2161 rtx_insn
*insn
= (rtx_insn
*) data
;
2162 unsigned int regno
= 0;
2163 enum machine_mode mode
= GET_MODE (dst
);
2165 /* Some targets do argument pushes without adding REG_INC notes. */
2169 dst
= XEXP (dst
, 0);
2170 if (GET_CODE (dst
) == PRE_INC
|| GET_CODE (dst
) == POST_INC
2171 || GET_CODE (dst
) == PRE_DEC
|| GET_CODE (dst
) == POST_DEC
)
2172 reg_mode
[REGNO (XEXP (dst
, 0))] = VOIDmode
;
2176 if (GET_CODE (dst
) == SUBREG
)
2177 regno
= subreg_regno (dst
);
2178 else if (REG_P (dst
))
2179 regno
= REGNO (dst
);
2183 if (SCALAR_INT_MODE_P (mode
)
2184 && GET_CODE (set
) == SET
)
2186 rtx note
, sym
= NULL_RTX
;
2189 note
= find_reg_equal_equiv_note (insn
);
2190 if (note
&& GET_CODE (XEXP (note
, 0)) == SYMBOL_REF
)
2192 sym
= XEXP (note
, 0);
2195 else if (note
&& GET_CODE (XEXP (note
, 0)) == CONST
2196 && GET_CODE (XEXP (XEXP (note
, 0), 0)) == PLUS
2197 && GET_CODE (XEXP (XEXP (XEXP (note
, 0), 0), 0)) == SYMBOL_REF
2198 && CONST_INT_P (XEXP (XEXP (XEXP (note
, 0), 0), 1)))
2200 sym
= XEXP (XEXP (XEXP (note
, 0), 0), 0);
2201 off
= XEXP (XEXP (XEXP (note
, 0), 0), 1);
2204 if (sym
!= NULL_RTX
)
2206 move2add_record_sym_value (dst
, sym
, off
);
2211 if (SCALAR_INT_MODE_P (mode
)
2212 && GET_CODE (set
) == SET
2213 && GET_CODE (SET_DEST (set
)) != ZERO_EXTRACT
2214 && GET_CODE (SET_DEST (set
)) != STRICT_LOW_PART
)
2216 rtx src
= SET_SRC (set
);
2218 unsigned HOST_WIDE_INT offset
;
2221 switch (GET_CODE (src
))
2224 if (REG_P (XEXP (src
, 0)))
2226 base_reg
= XEXP (src
, 0);
2228 if (CONST_INT_P (XEXP (src
, 1)))
2229 offset
= UINTVAL (XEXP (src
, 1));
2230 else if (REG_P (XEXP (src
, 1))
2231 && move2add_valid_value_p (REGNO (XEXP (src
, 1)), mode
))
2233 if (reg_base_reg
[REGNO (XEXP (src
, 1))] < 0
2234 && reg_symbol_ref
[REGNO (XEXP (src
, 1))] == NULL_RTX
)
2235 offset
= reg_offset
[REGNO (XEXP (src
, 1))];
2236 /* Maybe the first register is known to be a
2238 else if (move2add_valid_value_p (REGNO (base_reg
), mode
)
2239 && reg_base_reg
[REGNO (base_reg
)] < 0
2240 && reg_symbol_ref
[REGNO (base_reg
)] == NULL_RTX
)
2242 offset
= reg_offset
[REGNO (base_reg
)];
2243 base_reg
= XEXP (src
, 1);
2262 /* Start tracking the register as a constant. */
2263 reg_base_reg
[regno
] = -1;
2264 reg_symbol_ref
[regno
] = NULL_RTX
;
2265 reg_offset
[regno
] = INTVAL (SET_SRC (set
));
2266 /* We assign the same luid to all registers set to constants. */
2267 reg_set_luid
[regno
] = move2add_last_label_luid
+ 1;
2268 move2add_record_mode (dst
);
2275 base_regno
= REGNO (base_reg
);
2276 /* If information about the base register is not valid, set it
2277 up as a new base register, pretending its value is known
2278 starting from the current insn. */
2279 if (!move2add_valid_value_p (base_regno
, mode
))
2281 reg_base_reg
[base_regno
] = base_regno
;
2282 reg_symbol_ref
[base_regno
] = NULL_RTX
;
2283 reg_offset
[base_regno
] = 0;
2284 reg_set_luid
[base_regno
] = move2add_luid
;
2285 gcc_assert (GET_MODE (base_reg
) == mode
);
2286 move2add_record_mode (base_reg
);
2289 /* Copy base information from our base register. */
2290 reg_set_luid
[regno
] = reg_set_luid
[base_regno
];
2291 reg_base_reg
[regno
] = reg_base_reg
[base_regno
];
2292 reg_symbol_ref
[regno
] = reg_symbol_ref
[base_regno
];
2294 /* Compute the sum of the offsets or constants. */
2296 = trunc_int_for_mode (offset
+ reg_offset
[base_regno
], mode
);
2298 move2add_record_mode (dst
);
2303 /* Invalidate the contents of the register. */
2304 move2add_record_mode (dst
);
2305 reg_mode
[regno
] = VOIDmode
;
const pass_data pass_data_postreload_cse =
  RTL_PASS, /* type */
  "postreload", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_RELOAD_CSE_REGS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */

class pass_postreload_cse : public rtl_opt_pass
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)

  /* opt_pass methods: */
  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }
  virtual unsigned int execute (function *);

}; // class pass_postreload_cse

pass_postreload_cse::execute (function *fun)
  if (!dbg_cnt (postreload_cse))

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (fun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())

make_pass_postreload_cse (gcc::context *ctxt)
  return new pass_postreload_cse (ctxt);