/* Perform simple optimizations to clean up the result of reload.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "statistics.h"
#include "double-int.h"
#include "fixed-value.h"
#include "insn-codes.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
static int reload_cse_noop_set_p (rtx);
static bool reload_cse_simplify (rtx_insn *, rtx);
static void reload_cse_regs_1 (void);
static int reload_cse_simplify_set (rtx, rtx_insn *);
static int reload_cse_simplify_operands (rtx_insn *, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx_insn *);
static void move2add_note_store (rtx, const_rtx, void *);
/* Call cse / combine like post-reload optimization phases.
   FIRST is the first instruction.  */

static void
reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
{
  bool moves_converted;
  reload_cse_regs_1 ();
  reload_combine ();
  moves_converted = reload_cse_move2add (first);
  if (flag_expensive_optimizations)
    {
      if (moves_converted)
        reload_combine ();
      reload_cse_regs_1 ();
    }
}
/* See whether a single set SET is a noop.  */
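/* E.g. (set (reg:SI 1) (reg:SI 1)), or any set whose source and
   destination are known via cselib to hold the same value already.  */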
static int
reload_cse_noop_set_p (rtx set)
{
  if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
    return 0;

  return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
}
/* Try to simplify INSN.  Return true if the CFG may have changed.  */

static bool
reload_cse_simplify (rtx_insn *insn, rtx testreg)
{
  rtx body = PATTERN (insn);
  basic_block insn_bb = BLOCK_FOR_INSN (insn);
  unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);

  if (GET_CODE (body) == SET)
    {
      int count = 0;

      /* Simplify even if we may think it is a no-op.
         We may think a memory load of a value smaller than WORD_SIZE
         is redundant because we haven't taken into account possible
         implicit extension.  reload_cse_simplify_set() will bring
         this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
        {
          rtx value = SET_DEST (body);
          if (REG_P (value)
              && ! REG_FUNCTION_VALUE_P (value))
            value = 0;
          if (check_for_inc_dec (insn))
            delete_insn_and_edges (insn);
          /* We're done with this insn.  */
          goto done;
        }

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
         within the body of the asm.  Invalidate those registers now so that
         we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
        {
          for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
            {
              rtx part = XVECEXP (body, 0, i);
              if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
                cselib_invalidate_rtx (XEXP (part, 0));
            }
        }

      /* If every action in a PARALLEL is a noop, we can delete
         the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx part = XVECEXP (body, 0, i);
          if (GET_CODE (part) == SET)
            {
              if (! reload_cse_noop_set_p (part))
                break;
              if (REG_P (SET_DEST (part))
                  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
                {
                  if (value)
                    break;
                  value = SET_DEST (part);
                }
            }
          else if (GET_CODE (part) != CLOBBER)
            break;
        }

      if (i < 0)
        {
          if (check_for_inc_dec (insn))
            delete_insn_and_edges (insn);
          /* We're done with this insn.  */
          goto done;
        }

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        if (GET_CODE (XVECEXP (body, 0, i)) == SET)
          count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }

done:
  return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
}
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */
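/* A hedged illustration (not part of the original sources; register
   numbers are invented): if reload left behind

     (set (reg:SI 3) (mem:SI (reg:SI 4)))
     ...
     (set (reg:SI 5) (mem:SI (reg:SI 4)))

   and a register copy is cheaper than the memory load, the second
   insn can become

     (set (reg:SI 5) (reg:SI 3))  */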
static void
reload_cse_regs_1 (void)
{
  bool cfg_changed = false;
  basic_block bb;
  rtx_insn *insn;
  rtx testreg = gen_rtx_REG (VOIDmode, -1);

  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
          cfg_changed |= reload_cse_simplify (insn, testreg);

        cselib_process_insn (insn);
      }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
  if (cfg_changed)
    cleanup_cfg (0);
}
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */
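/* For instance (a hypothetical example, not from the sources): if
   cselib knows that (reg:SI 2) already holds the constant 5, then

     (set (reg:SI 1) (const_int 5))

   may become the register copy

     (set (reg:SI 1) (reg:SI 2))

   whenever the copy is no more expensive than the constant load.  */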
static int
reload_cse_simplify_set (rtx set, rtx_insn *insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  reg_class_t dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
#ifdef LOAD_EXTEND_OP
  enum rtx_code extend_op = UNKNOWN;
#endif
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

#ifdef LOAD_EXTEND_OP
  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
  if (MEM_P (src)
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;
#endif

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
                                   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = set_src_cost (src, speed);

  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
        {
#ifdef LOAD_EXTEND_OP
          if (extend_op != UNKNOWN)
            {
              wide_int result;

              if (!CONST_SCALAR_INT_P (this_rtx))
                continue;

              switch (extend_op)
                {
                case ZERO_EXTEND:
                  result = wide_int::from (std::make_pair (this_rtx,
                                                           GET_MODE (src)),
                                           BITS_PER_WORD, UNSIGNED);
                  break;
                case SIGN_EXTEND:
                  result = wide_int::from (std::make_pair (this_rtx,
                                                           GET_MODE (src)),
                                           BITS_PER_WORD, SIGNED);
                  break;
                default:
                  gcc_unreachable ();
                }
              this_rtx = immed_wide_int_const (result, word_mode);
            }
#endif
          this_cost = set_src_cost (this_rtx, speed);
        }
      else if (REG_P (this_rtx))
        {
#ifdef LOAD_EXTEND_OP
          if (extend_op != UNKNOWN)
            {
              this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
              this_cost = set_src_cost (this_rtx, speed);
            }
          else
#endif
            this_cost = register_move_cost (GET_MODE (this_rtx),
                                            REGNO_REG_CLASS (REGNO (this_rtx)),
                                            dclass);
        }
      else
        continue;

      /* If equal costs, prefer registers over anything else.  That
         tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
          || (this_cost == old_cost
              && REG_P (this_rtx)
              && !REG_P (SET_SRC (set))))
        {
#ifdef LOAD_EXTEND_OP
          if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
              && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
              && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                            word_mode,
                                            REGNO_REG_CLASS (REGNO (SET_DEST (set))))
#endif
              )
            {
              rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
              ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
              validate_change (insn, &SET_DEST (set), wide_dest, 1);
            }
#endif

          validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
          old_cost = this_cost, did_change = 1;
        }
    }

  return did_change;
}
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */
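/* A rough illustration (hypothetical operands): given
   (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 64))), if (reg:SI 3)
   is known to hold 64 and some alternative accepts a register for the
   second operand, the constant may be replaced by (reg:SI 3), much as
   an optional reload would have done.  */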
static int
reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_constrain_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
         right, so avoid the problem here.  Likewise if we have a constant
         and the insn pattern doesn't tell us the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
          || (CONSTANT_P (recog_data.operand[i])
              && recog_data.operand_mode[i] == VOIDmode))
        continue;

      op = recog_data.operand[i];
#ifdef LOAD_EXTEND_OP
      if (MEM_P (op)
          && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
          && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
        {
          rtx set = single_set (insn);

          /* We might have multiple sets, some of which do implicit
             extension.  Punt on this for now.  */
          if (! set)
            continue;
          /* If the destination is also a MEM or a STRICT_LOW_PART, no
             extension applies.
             Also, if there is an explicit extension, we don't have to
             worry about an implicit one.  */
          else if (MEM_P (SET_DEST (set))
                   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
                   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
                   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
            ; /* Continue ordinary processing.  */
#ifdef CANNOT_CHANGE_MODE_CLASS
          /* If the register cannot change mode to word_mode, it follows that
             it cannot have been used in word_mode.  */
          else if (REG_P (SET_DEST (set))
                   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                                word_mode,
                                                REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
            ; /* Continue ordinary processing.  */
#endif
          /* If this is a straight load, make the extension explicit.  */
          else if (REG_P (SET_DEST (set))
                   && recog_data.n_operands == 2
                   && SET_SRC (set) == op
                   && SET_DEST (set) == recog_data.operand[1-i])
            {
              validate_change (insn, recog_data.operand_loc[i],
                               gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
                                              word_mode, op),
                               1);
              validate_change (insn, recog_data.operand_loc[1-i],
                               gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
                               1);
              if (! apply_change_group ())
                return 0;
              return reload_cse_simplify_operands (insn, testreg);
            }
          else
            /* ??? There might be arithmetic operations with memory that are
               safe to optimize, but is it worth the trouble?  */
            continue;
        }
#endif /* LOAD_EXTEND_OP */
      if (side_effects_p (op))
        continue;
      v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
      if (! v)
        continue;

      for (l = v->locs; l; l = l->next)
        if (REG_P (l->loc))
          SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }

  alternative_mask preferred = get_preferred_alternatives (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
        op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
         for this operand.  */
      j = 0;
      while (*p != '\0')
        {
          char c = *p++;
          if (c == ',')
            j++;
          else if (c == '?')
            alternative_reject[j] += 3;
          else if (c == '!')
            alternative_reject[j] += 300;
        }

      /* We won't change operands which are already registers.  We
         also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
          || constraints[i][0] == '='
          || constraints[i][0] == '+')
        continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          enum reg_class rclass = NO_REGS;

          if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
            continue;

          SET_REGNO_RAW (testreg, regno);
          PUT_MODE (testreg, mode);

          /* We found a register equal to this operand.  Now look for all
             alternatives that can accept this register and have not been
             assigned a register they can use yet.  */
          j = 0;
          p = constraints[i];
          for (;;)
            {
              char c = *p;

              switch (c)
                {
                case 'g':
                  rclass = reg_class_subunion[rclass][GENERAL_REGS];
                  break;

                default:
                  rclass
                    = (reg_class_subunion
                       [rclass]
                       [reg_class_for_constraint (lookup_constraint (p))]);
                  break;

                case ',': case '\0':
                  /* See if REGNO fits this alternative, and set it up as the
                     replacement register if we don't have one for this
                     alternative yet and the operand being replaced is not
                     a cheap CONST_INT.  */
                  if (op_alt_regno[i][j] == -1
                      && TEST_BIT (preferred, j)
                      && reg_fits_class_p (testreg, rclass, 0, mode)
                      && (!CONST_INT_P (recog_data.operand[i])
                          || (set_src_cost (recog_data.operand[i],
                                            optimize_bb_for_speed_p
                                             (BLOCK_FOR_INSN (insn)))
                              > set_src_cost (testreg,
                                              optimize_bb_for_speed_p
                                               (BLOCK_FOR_INSN (insn))))))
                    {
                      alternative_nregs[j]++;
                      op_alt_regno[i][j] = regno;
                    }
                  j++;
                  rclass = NO_REGS;
                  break;
                }
              p += CONSTRAINT_LEN (c, p);

              if (c == '\0')
                break;
            }
        }
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];
      int tmp;

      for (j = i + 1; j < recog_data.n_alternatives; j++)
        {
          int this_reject = alternative_reject[alternative_order[j]];
          int this_nregs = alternative_nregs[alternative_order[j]];

          if (this_reject < best_reject
              || (this_reject == best_reject && this_nregs > best_nregs))
            {
              best = j;
              best_reject = this_reject;
              best_nregs = this_nregs;
            }
        }

      tmp = alternative_order[best];
      alternative_order[best] = alternative_order[i];
      alternative_order[i] = tmp;
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
        continue;

      validate_change (insn, recog_data.operand_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
        continue;

      validate_change (insn, recog_data.dup_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */

/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16
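/* A hedged sketch of the transformation (hypothetical RTL, with
   invented register numbers):

     (set (reg 1) (const_int 0))
     (set (reg 1) (plus (reg 1) (reg 2)))
     ... (mem (reg 1)) ...

   can become, given a free index register (reg 3),

     (set (reg 3) (const_int 0))
     ... (mem (plus (reg 3) (reg 2))) ...  */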
/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx_insn *insn;

  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;

  /* Location of the register within INSN.  */
  rtx *usep;

  /* The reverse uid of the insn.  */
  int ruid;
};

/* If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.  */
static struct
  {
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    rtx offset;
    int use_index;
    int store_ruid;
    int real_store_ruid;
    int use_ruid;
    bool all_offsets_match;
  } reg_state[FIRST_PSEUDO_REGISTER];
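/* A worked example of this bookkeeping (hypothetical, not from the
   original sources): scanning backwards, the first use of (reg 1) seen
   inside (mem (plus (reg 1) (const_int 4))) decrements use_index from
   RELOAD_COMBINE_MAX_USES and records OFFSET = (const_int 4); further
   uses with a different constant clear all_offsets_match.  When the
   scan later reaches an insn that sets (reg 1), store_ruid is updated,
   bounding the range over which the recorded uses may be rewritten.  */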
/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;

#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  if (*pruid > split_ruid)
    (*pruid)++;
}

/* Called when we insert a new insn in a position we've already passed in
   the scan.  Examine all our state, increasing all ruids that are higher
   than SPLIT_RUID by one in order to make room for a new insn.  */

static void
reload_combine_split_ruids (int split_ruid)
{
  int i;

  reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, idx = reg_state[i].use_index;
      reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
                                     split_ruid);
      if (idx < 0)
        continue;
      for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
        reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
                                       split_ruid);
    }
}
/* Called when we are about to rescan a previously encountered insn with
   reload_combine_note_use after modifying some part of it.  This clears all
   information about uses in that particular insn.  */

static void
reload_combine_purge_insn_uses (rtx_insn *insn)
{
  unsigned i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, k, idx = reg_state[i].use_index;
      if (idx < 0)
        continue;
      j = k = RELOAD_COMBINE_MAX_USES;
      while (j-- > idx)
        {
          if (reg_state[i].reg_use[j].insn != insn)
            {
              k--;
              if (k != j)
                reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
            }
        }
      reg_state[i].use_index = k;
    }
}
/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    {
      if (reg_state[regno].reg_use[j].ruid >= ruid)
        continue;
      k--;
      if (k != j)
        reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
    }
  reg_state[regno].use_index = k;
}
/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
        continue;
      if (this_ruid > best_ruid)
        {
          best_ruid = this_ruid;
          retval = use;
        }
      else if (this_ruid == best_ruid)
        retval = NULL;
    }
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}
/* After we've moved an add insn, fix up any debug insns that occur
   between the old location of the add and the new location.  REG is
   the destination register of the add insn; REPLACEMENT is the
   SET_SRC of the add.  FROM and TO specify the range in which we
   should make this change on debug insns.  */

static void
fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn;
  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      rtx t;

      if (!DEBUG_INSN_P (insn))
        continue;

      t = INSN_VAR_LOCATION_LOC (insn);
      t = simplify_replace_rtx (t, reg, replacement);
      validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
    }
}
/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */
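/* For example (hypothetical RTL): with REG = (reg 1) and
   SRC = (plus (reg 2) (const_int 8)), a use inside (mem:SI (reg 1))
   tentatively becomes (mem:SI (plus (reg 2) (const_int 8))); the
   change is kept only if the new address is valid and no more
   expensive than the old one.  */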
static bool
try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
{
  rtx_insn *use_insn = use->insn;
  rtx mem = use->containing_mem;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

  if (mem != NULL_RTX)
    {
      addr_space_t as = MEM_ADDR_SPACE (mem);
      rtx oldaddr = XEXP (mem, 0);
      rtx newaddr = NULL_RTX;
      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
      int new_cost;

      newaddr = simplify_replace_rtx (oldaddr, reg, src);
      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
        {
          XEXP (mem, 0) = newaddr;
          new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
          XEXP (mem, 0) = oldaddr;
          if (new_cost <= old_cost
              && validate_change (use_insn,
                                  &XEXP (mem, 0), newaddr, 0))
            return true;
        }
    }
  else
    {
      rtx new_set = single_set (use_insn);
      if (new_set
          && REG_P (SET_DEST (new_set))
          && GET_CODE (SET_SRC (new_set)) == PLUS
          && REG_P (XEXP (SET_SRC (new_set), 0))
          && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
        {
          rtx new_src;
          int old_cost = set_src_cost (SET_SRC (new_set), speed);

          gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
          new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);

          if (set_src_cost (new_src, speed) <= old_cost
              && validate_change (use_insn, &SET_SRC (new_set),
                                  new_src, 0))
            return true;
        }
    }
  return false;
}
/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */
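/* A hedged sketch (invented register numbers): the pass looks for

     (set (reg 1) (plus (reg 1) (const_int 16)))
     ...
     ... (mem (reg 1)) ...

   and, when profitable, folds the addition into the use,

     ... (mem (plus (reg 1) (const_int 16))) ...

   then moves or deletes the add insn itself.  */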
static bool
reload_combine_recognize_const_pattern (rtx_insn *insn)
{
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx_insn *add_moved_after_insn = NULL;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  addreg = XEXP (src, 0);
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
         single_set there may also be clobbers.  Recognize one special
         case, that of one clobber alongside the set (likely a clobber
         of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
          || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
          || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
        return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
        /* Start the search for the next use from here.  */
        from_ruid = use->ruid;

      if (use && GET_MODE (*use->usep) == Pmode)
        {
          bool delete_add = false;
          rtx_insn *use_insn = use->insn;
          int use_ruid = use->ruid;

          /* Avoid moving the add insn past a jump.  */
          if (must_move_add && use_ruid <= last_jump_ruid)
            break;

          /* If the add clobbers another hard reg in parallel, don't move
             it past a real set of this hard reg.  */
          if (must_move_add && clobbered_regno >= 0
              && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
            break;

#ifdef HAVE_cc0
          /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
          if (must_move_add && sets_cc0_p (PATTERN (use_insn)))
            break;
#endif

          gcc_assert (reg_state[regno].store_ruid <= use_ruid);
          /* Avoid moving a use of ADDREG past a point where it is stored.  */
          if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
            break;

          /* We also must not move the addition past an insn that sets
             the same register, unless we can combine two add insns.  */
          if (must_move_add && reg_state[regno].store_ruid == use_ruid)
            {
              if (use->containing_mem == NULL_RTX)
                delete_add = true;
              else
                break;
            }

          if (try_replace_in_use (use, reg, src))
            {
              reload_combine_purge_insn_uses (use_insn);
              reload_combine_note_use (&PATTERN (use_insn), use_insn,
                                       use_ruid, NULL_RTX);

              if (delete_add)
                {
                  fixup_debug_insns (reg, src, insn, use_insn);
                  delete_insn (insn);
                  return true;
                }
              if (must_move_add)
                {
                  add_moved_after_insn = use_insn;
                  add_moved_after_ruid = use_ruid;
                }
              continue;
            }
        }
      /* If we get here, we couldn't handle this use.  */
      if (must_move_add)
        break;
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (reg, src, insn, add_moved_after_insn);

  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
                           add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}
/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */

static bool
reload_combine_recognize_pattern (rtx_insn *insn)
{
  rtx set, reg, src;
  unsigned int regno;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1)
    return false;

  regno = REGNO (reg);

  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && reg_state[regno].use_index >= 0
      && reg_state[regno].use_index < RELOAD_COMBINE_MAX_USES
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
         REGZ in the illustration above) and REG_SUM to the expression
         register+register that we want to use to substitute uses of REG
         (typically in MEMs) with.  First check REG and BASE for being
         index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
          || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
                                REGNO (base)))
        {
          index_reg = reg;
          reg_sum = src;
        }
      else
        {
          /* Otherwise, look for a free index register.  Since we have
             checked above that neither REG nor BASE are index registers,
             if we find anything at all, it will be different from these
             two registers.  */
          for (i = first_index_reg; i <= last_index_reg; i++)
            {
              if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
                  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
                  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
                  && (call_used_regs[i] || df_regs_ever_live_p (i))
                  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
                  && !fixed_regs[i] && !global_regs[i]
                  && hard_regno_nregs[i][GET_MODE (reg)] == 1
                  && targetm.hard_regno_scratch_ok (i))
                {
                  index_reg = gen_rtx_REG (GET_MODE (reg), i);
                  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
                  break;
                }
            }
        }

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
         (REGY), i.e. BASE, is not clobbered before the last use we'll
         create.  */
      if (reg_sum
          && prev_set
          && CONST_INT_P (SET_SRC (prev_set))
          && rtx_equal_p (SET_DEST (prev_set), reg)
          && (reg_state[REGNO (base)].store_ruid
              <= reg_state[regno].use_ruid))
        {
          /* Change destination register and, if necessary, the constant
             value in PREV, the constant loading instruction.  */
          validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
          if (reg_state[regno].offset != const0_rtx)
            validate_change (prev,
                             &SET_SRC (prev_set),
                             GEN_INT (INTVAL (SET_SRC (prev_set))
                                      + INTVAL (reg_state[regno].offset)),
                             1);

          /* Now for every use of REG that we have recorded, replace REG
             with REG_SUM.  */
          for (i = reg_state[regno].use_index;
               i < RELOAD_COMBINE_MAX_USES; i++)
            validate_unshare_change (reg_state[regno].reg_use[i].insn,
                                     reg_state[regno].reg_use[i].usep,
                                     /* Each change must have its own
                                        replacement.  */
                                     reg_sum, 1);

          if (apply_change_group ())
            {
              struct reg_use *lowest_ruid = NULL;

              /* For every new use of REG_SUM, we have to record the use
                 of BASE therein, i.e. operand 1.  */
              for (i = reg_state[regno].use_index;
                   i < RELOAD_COMBINE_MAX_USES; i++)
                {
                  struct reg_use *use = reg_state[regno].reg_use + i;
                  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
                                           use->ruid, use->containing_mem);
                  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
                    lowest_ruid = use;
                }

              fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

              /* Delete the reg-reg addition.  */
              delete_insn (insn);

              if (reg_state[regno].offset != const0_rtx)
                /* Previous REG_EQUIV / REG_EQUAL notes for PREV
                   are now invalid.  */
                remove_reg_equal_equiv_notes (prev);

              reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
              return true;
            }
        }
    }
  return false;
}
static void
reload_combine (void)
{
  rtx_insn *insn, *prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
        if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
          {
            if (first_index_reg == -1)
              first_index_reg = r;

            last_index_reg = r;
          }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
         to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
        {
          last_index_reg = -1;
          return;
        }
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
        {
          HARD_REG_SET live;
          bitmap live_in = df_get_live_in (bb);

          REG_SET_TO_HARD_REG_SET (live, live_in);
          compute_use_by_pseudos (&live, live_in);
          COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
          IOR_HARD_REG_SET (ever_live_at_start, live);
        }
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
        reg_state[r].use_index = -1;
      else
        reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
         information we have would be costly, so we just note where the label
         is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
        last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
        {
          /* Crossing a barrier resets all the use information.  */
          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (! fixed_regs[r])
              reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
        }
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
        /* Optimizations across insns being marked as volatile must be
           prevented.  All the usage information is invalidated
           here.  */
        for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
          if (! fixed_regs[r]
              && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
            reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
        continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
        last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
          || reload_combine_recognize_pattern (insn))
        continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
        {
          rtx link;

          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (call_used_regs[r])
              {
                reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
                reg_state[r].store_ruid = reload_combine_ruid;
              }

          for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
               link = XEXP (link, 1))
            {
              rtx setuse = XEXP (link, 0);
              rtx usage_rtx = XEXP (setuse, 0);
              if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
                  && REG_P (usage_rtx))
                {
                  unsigned int i;
                  unsigned int start_reg = REGNO (usage_rtx);
                  unsigned int num_regs
                    = hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
                  unsigned int end_reg = start_reg + num_regs - 1;
                  for (i = start_reg; i <= end_reg; i++)
                    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
                      {
                        reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
                        reg_state[i].store_ruid = reload_combine_ruid;
                      }
                    else
                      reg_state[i].use_index = -1;
                }
            }
        }

      if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
        {
          /* Non-spill registers might be used at the call destination in
             some unknown fashion, so we have to mark the unknown use.  */
          HARD_REG_SET *live;

          if ((condjump_p (insn) || condjump_in_parallel_p (insn))
              && JUMP_LABEL (insn))
            {
              if (ANY_RETURN_P (JUMP_LABEL (insn)))
                live = NULL;
              else
                live = &LABEL_LIVE (JUMP_LABEL (insn));
            }
          else
            live = &ever_live_at_start;

          if (live)
            for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
              if (TEST_HARD_REG_BIT (*live, r))
                reg_state[r].use_index = -1;
        }

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
                               NULL_RTX);

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
            {
              int regno = REGNO (XEXP (note, 0));
              reg_state[regno].store_ruid = reload_combine_ruid;
              reg_state[regno].real_store_ruid = reload_combine_ruid;
              reg_state[regno].use_index = -1;
            }
        }
    }

  free (label_live);
}
/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
                                   GET_MODE (SUBREG_REG (dst)),
                                   SUBREG_BYTE (dst),
                                   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
          || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
        {
          regno = REGNO (XEXP (dst, 0));
          mode = GET_MODE (XEXP (dst, 0));
          for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
            {
              /* We could probably do better, but for now mark the register
                 as used in an unknown fashion and set/clobbered at this
                 insn.  */
              reg_state[i].use_index = -1;
              reg_state[i].store_ruid = reload_combine_ruid;
              reg_state[i].real_store_ruid = reload_combine_ruid;
            }
        }
      else
        return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].use_index = -1;
          reg_state[i].store_ruid = reload_combine_ruid;
          reg_state[i].real_store_ruid = reload_combine_ruid;
        }
    }
  else
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].store_ruid = reload_combine_ruid;
          if (GET_CODE (set) == SET)
            reg_state[i].real_store_ruid = reload_combine_ruid;
          reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
        }
    }
}
/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */

static void
reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
{
  rtx x = *xp;
  enum rtx_code code = x->code;
  const char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      if (REG_P (SET_DEST (x)))
        {
          reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
          return;
        }
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
        {
          /* Mark the return register as used in an unknown fashion.  */
          rtx reg = XEXP (x, 0);
          int regno = REGNO (reg);
          int nregs = hard_regno_nregs[regno][GET_MODE (reg)];

          while (--nregs >= 0)
            reg_state[regno + nregs].use_index = -1;
          return;
        }
      break;

    case CLOBBER:
      if (REG_P (SET_DEST (x)))
        {
          /* No spurious CLOBBERs of pseudo registers may remain.  */
          gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
          return;
        }
      break;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)) .  */
      if (!REG_P (XEXP (x, 0))
          || !CONST_INT_P (XEXP (x, 1)))
        break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
        int regno = REGNO (x);
        int use_index;
        int nregs;

        /* No spurious USEs of pseudo registers may remain.  */
        gcc_assert (regno < FIRST_PSEUDO_REGISTER);

        nregs = hard_regno_nregs[regno][GET_MODE (x)];

        /* We can't substitute into multi-hard-reg uses.  */
        if (nregs > 1)
          {
            while (--nregs >= 0)
              reg_state[regno + nregs].use_index = -1;
            return;
          }

        /* We may be called to update uses in previously seen insns.
           Don't add uses beyond the last store we saw.  */
        if (ruid < reg_state[regno].store_ruid)
          return;

        /* If this register is already used in some unknown fashion, we
           can't do anything.
           If we decrement the index from zero to -1, we can't store more
           uses, so this register becomes used in an unknown fashion.  */
        use_index = --reg_state[regno].use_index;
        if (use_index < 0)
          return;

        if (use_index == RELOAD_COMBINE_MAX_USES - 1)
          {
            /* This is the first use of this register we have seen since we
               marked it as dead.  */
            reg_state[regno].offset = offset;
            reg_state[regno].all_offsets_match = true;
            reg_state[regno].use_ruid = ruid;
          }
        else
          {
            if (reg_state[regno].use_ruid > ruid)
              reg_state[regno].use_ruid = ruid;

            if (! rtx_equal_p (offset, reg_state[regno].offset))
              reg_state[regno].all_offsets_match = false;
          }

        reg_state[regno].reg_use[use_index].insn = insn;
        reg_state[regno].reg_use[use_index].ruid = ruid;
        reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
        reg_state[regno].reg_use[use_index].usep = xp;
        return;
      }

    case MEM:
      containing_mem = x;
      break;

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
      else if (fmt[i] == 'E')
        {
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
                                     containing_mem);
        }
    }
}
/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.
   For a set that established a new (potential) base register with
   non-constant value, we use move2add_luid from the place where the
   setting insn is encountered; registers based off that base then
   get the same reg_set_luid.  Constants all get
   move2add_last_label_luid + 1 as their reg_set_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n].
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
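/* For instance (an illustrative assumption, not from the sources): a
   value recorded in SImode can still be consulted in QImode on a
   target where truncation is a true no-op, since QImode is narrower;
   the reverse (using QImode data in SImode) is never allowed because
   the implicit extension behavior is unknown.  */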
/* Record that REG is being set to a value with the mode of REG.  */

static void
move2add_record_mode (rtx reg)
{
  int regno, nregs;
  machine_mode mode = GET_MODE (reg);

  if (GET_CODE (reg) == SUBREG)
    {
      regno = subreg_regno (reg);
      nregs = subreg_nregs (reg);
    }
  else if (REG_P (reg))
    {
      regno = REGNO (reg);
      nregs = hard_regno_nregs[regno][mode];
    }
  else
    gcc_unreachable ();
  for (int i = nregs - 1; i > 0; i--)
    reg_mode[regno + i] = BLKmode;
  reg_mode[regno] = mode;
}
/* Record that REG is being set to the sum of SYM and OFF.  */

static void
move2add_record_sym_value (rtx reg, rtx sym, rtx off)
{
  int regno = REGNO (reg);

  move2add_record_mode (reg);
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
}
/* Check if REGNO contains a valid value in MODE.  */

static bool
move2add_valid_value_p (int regno, machine_mode mode)
{
  if (reg_set_luid[regno] <= move2add_last_label_luid)
    return false;

  if (mode != reg_mode[regno])
    {
      if (!MODES_OK_FOR_MOVE2ADD (mode, reg_mode[regno]))
        return false;
      /* The value loaded into regno in reg_mode[regno] is also valid in
         mode after truncation only if (REG:mode regno) is the lowpart of
         (REG:reg_mode[regno] regno).  Now, for big endian, the starting
         regno of the lowpart might be different.  */
      int s_off = subreg_lowpart_offset (mode, reg_mode[regno]);
      s_off = subreg_regno_offset (regno, reg_mode[regno], s_off, mode);
      if (s_off != 0)
        /* We could in principle adjust regno, check reg_mode[regno] to be
           BLKmode, and return s_off to the caller (vs. -1 for failure),
           but we currently have no callers that could make use of this
           information.  */
        return false;
    }

  for (int i = hard_regno_nregs[regno][mode] - 1; i > 0; i--)
    if (reg_mode[regno + i] != BLKmode)
      return false;
  return true;
}
/* This function is called with INSN that sets REG to (SYM + OFF),
   while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */
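/* A hypothetical illustration (invented register numbers): if (reg 1)
   is known to hold SYM + 4 and INSN is
     (set (reg 1) (const (plus SYM (const_int 12))))
   the insn may be rewritten as
     (set (reg 1) (plus (reg 1) (const_int 8)))
   when the add is cheaper than reloading the constant.  */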
static bool
move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno],
                              GET_MODE (reg));
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
         truncation, that, if turned into (set (reg)
         (reg)), would be discarded.  Maybe we should
         try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset [regno])
        changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
          && have_add2_insn (reg, new_src))
        changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
        {
          machine_mode narrow_mode;
          for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               narrow_mode != VOIDmode
                 && narrow_mode != GET_MODE (reg);
               narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
            {
              if (have_insn_for (STRICT_LOW_PART, narrow_mode)
                  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
                      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
                {
                  rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
                  rtx narrow_src = gen_int_mode (INTVAL (off),
                                                 narrow_mode);
                  rtx new_set
                    = gen_rtx_SET (VOIDmode,
                                   gen_rtx_STRICT_LOW_PART (VOIDmode,
                                                            narrow_reg),
                                   narrow_src);
                  get_full_set_rtx_cost (new_set, &newcst);
                  if (costs_lt_p (&newcst, &oldcst, speed))
                    {
                      changed = validate_change (insn, &PATTERN (insn),
                                                 new_set, 0);
                      if (changed)
                        break;
                    }
                }
            }
        }
    }
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
/* This function is called with INSN that sets REG to (SYM + OFF),
   but REG doesn't have known value (SYM + offset).  This function
   tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */
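/* A hypothetical illustration (invented register numbers): if INSN is
     (set (reg 2) (const (plus SYM (const_int 12))))
   and (reg 1) is known to hold SYM + 4, the insn may become
     (set (reg 2) (plus (reg 1) (const_int 8)))
   with the candidate register chosen to minimize the cost.  */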
static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, GET_MODE (reg))
        && reg_base_reg[i] < 0
        && reg_symbol_ref[i] != NULL_RTX
        && rtx_equal_p (sym, reg_symbol_ref[i]))
      {
        rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
                                    GET_MODE (reg));
        /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
           use (set (reg) (reg)) instead.
           We don't delete this insn, nor do we convert it into a
           note, to avoid losing register notes or the return
           value flag.  jump2 already knows how to get rid of
           no-op moves.  */
        if (new_src == const0_rtx)
          {
            init_costs_to_zero (&mincst);
            min_regno = i;
            break;
          }
        else
          {
            XEXP (plus_expr, 1) = new_src;
            get_full_set_rtx_cost (pat, &newcst);

            if (costs_lt_p (&newcst, &mincst, speed))
              {
                mincst = newcst;
                min_regno = i;
              }
          }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      if (i != min_regno)
        {
          rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
                                      GET_MODE (reg));
          tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
        }
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
        changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
/* Convert move insns with constant inputs to additions if they are cheaper.
   Return true if any changes were made.  */

static bool
reload_cse_move2add (rtx_insn *first)
{
  int i;
  rtx_insn *insn;
  bool changed = false;

  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
    {
      reg_set_luid[i] = 0;
      reg_offset[i] = 0;
      reg_base_reg[i] = 0;
      reg_symbol_ref[i] = NULL_RTX;
      reg_mode[i] = VOIDmode;
    }

  move2add_last_label_luid = 0;
  move2add_luid = 2;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (LABEL_P (insn))
        {
          move2add_last_label_luid = move2add_luid;
          /* We're going to increment move2add_luid twice after a
             label, so that we can use move2add_last_label_luid + 1 as
             the luid for constants.  */
          move2add_luid++;
          continue;
        }
      if (! INSN_P (insn))
        continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
         straightforward SETs.  */
      if (GET_CODE (pat) == SET
          && REG_P (SET_DEST (pat)))
        {
          rtx reg = SET_DEST (pat);
          int regno = REGNO (reg);
          rtx src = SET_SRC (pat);

          /* Check if we have valid information on the contents of this
             register in the mode of REG.  */
          if (move2add_valid_value_p (regno, GET_MODE (reg))
              && dbg_cnt (cse2_move2add))
            {
              /* Try to transform (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (CONST_INT B))
                 to
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))
                 or
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
              */

              if (CONST_INT_P (src)
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] == NULL_RTX)
                {
                  changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
                  continue;
                }

              /* Try to transform (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT B)))
                 to
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (REGY))
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
              else if (REG_P (src)
                       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
                       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
                       && move2add_valid_value_p (REGNO (src), GET_MODE (reg)))
                {
                  rtx_insn *next = next_nonnote_nondebug_insn (insn);
                  rtx set = NULL_RTX;
                  if (next)
                    set = single_set (next);
                  if (set
                      && SET_DEST (set) == reg
                      && GET_CODE (SET_SRC (set)) == PLUS
                      && XEXP (SET_SRC (set), 0) == reg
                      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
                    {
                      rtx src3 = XEXP (SET_SRC (set), 1);
                      unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
                      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
                      HOST_WIDE_INT regno_offset = reg_offset[regno];
                      rtx new_src =
                        gen_int_mode (added_offset
                                      + base_offset
                                      - regno_offset,
                                      GET_MODE (reg));
                      bool success = false;
                      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

                      if (new_src == const0_rtx)
                        /* See above why we create (set (reg) (reg)) here.  */
                        success
                          = validate_change (next, &SET_SRC (set), reg, 0);
                      else
                        {
                          rtx old_src = SET_SRC (set);
                          struct full_rtx_costs oldcst, newcst;
                          rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

                          get_full_set_rtx_cost (set, &oldcst);
                          SET_SRC (set) = tem;
                          get_full_set_src_cost (tem, &newcst);
                          SET_SRC (set) = old_src;
                          costs_add_n_insns (&oldcst, 1);

                          if (costs_lt_p (&newcst, &oldcst, speed)
                              && have_add2_insn (reg, new_src))
                            {
                              rtx newpat = gen_rtx_SET (VOIDmode, reg, tem);
                              success
                                = validate_change (next, &PATTERN (next),
                                                   newpat, 0);
                            }
                        }
                      if (success)
                        delete_insn (insn);
                      changed |= success;
                      insn = next;
                      move2add_record_mode (reg);
                      reg_offset[regno]
                        = trunc_int_for_mode (added_offset + base_offset,
                                              GET_MODE (reg));
                      continue;
                    }
                }
            }

          /* Try to transform
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
             to
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
          if ((GET_CODE (src) == SYMBOL_REF
               || (GET_CODE (src) == CONST
                   && GET_CODE (XEXP (src, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
                   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
              && dbg_cnt (cse2_move2add))
            {
              rtx sym, off;

              if (GET_CODE (src) == SYMBOL_REF)
                {
                  sym = src;
                  off = const0_rtx;
                }
              else
                {
                  sym = XEXP (XEXP (src, 0), 0);
                  off = XEXP (XEXP (src, 0), 1);
                }

              /* If the reg already contains the value which is sum of
                 sym and some constant value, we can use an add2 insn.  */
              if (move2add_valid_value_p (regno, GET_MODE (reg))
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] != NULL_RTX
                  && rtx_equal_p (sym, reg_symbol_ref[regno]))
                changed |= move2add_use_add2_insn (reg, sym, off, insn);

              /* Otherwise, we have to find a register whose value is sum
                 of sym and some constant value.  */
              else
                changed |= move2add_use_add3_insn (reg, sym, off, insn);

              continue;
            }
        }

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC
              && REG_P (XEXP (note, 0)))
            {
              /* Reset the information about this register.  */
              int regno = REGNO (XEXP (note, 0));
              if (regno < FIRST_PSEUDO_REGISTER)
                {
                  move2add_record_mode (XEXP (note, 0));
                  reg_mode[regno] = VOIDmode;
                }
            }
        }
      note_stores (PATTERN (insn), move2add_note_store, insn);

      /* If INSN is a conditional branch, we try to extract an
         implicit set out of it.  */
      if (any_condjump_p (insn))
        {
          rtx cnd = fis_get_condition (insn);

          if (cnd != NULL_RTX
              && GET_CODE (cnd) == NE
              && REG_P (XEXP (cnd, 0))
              && !reg_set_p (XEXP (cnd, 0), insn)
              /* The following two checks, which are also in
                 move2add_note_store, are intended to reduce the
                 number of calls to gen_rtx_SET to avoid memory
                 allocation if possible.  */
              && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
              && hard_regno_nregs[REGNO (XEXP (cnd, 0))][GET_MODE (XEXP (cnd, 0))] == 1
              && CONST_INT_P (XEXP (cnd, 1)))
            {
              rtx implicit_set =
                gen_rtx_SET (VOIDmode, XEXP (cnd, 0), XEXP (cnd, 1));
              move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
            }
        }

      /* If this is a CALL_INSN, all call used registers are stored with
         unknown values.  */
      if (CALL_P (insn))
        {
          for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
            {
              if (call_used_regs[i])
                /* Reset the information about this register.  */
                reg_mode[i] = VOIDmode;
            }
        }
    }
  return changed;
}
/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */

static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx_insn *insn = (rtx_insn *) data;
  unsigned int regno = 0;
  machine_mode mode = GET_MODE (dst);

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
        reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
      return;
    }

  if (GET_CODE (dst) == SUBREG)
    regno = subreg_regno (dst);
  else if (REG_P (dst))
    regno = REGNO (dst);
  else
    return;

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      rtx off;

      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
        {
          sym = XEXP (note, 0);
          off = const0_rtx;
        }
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
               && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
               && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
               && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
        {
          sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
          off = XEXP (XEXP (XEXP (note, 0), 0), 1);
        }

      if (sym != NULL_RTX)
        {
          move2add_record_sym_value (dst, sym, off);
          return;
        }
    }

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      unsigned HOST_WIDE_INT offset;
      int base_regno;

      switch (GET_CODE (src))
        {
        case PLUS:
          if (REG_P (XEXP (src, 0)))
            {
              base_reg = XEXP (src, 0);

              if (CONST_INT_P (XEXP (src, 1)))
                offset = UINTVAL (XEXP (src, 1));
              else if (REG_P (XEXP (src, 1))
                       && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
                {
                  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
                      && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
                    offset = reg_offset[REGNO (XEXP (src, 1))];
                  /* Maybe the first register is known to be a
                     constant.  */
                  else if (move2add_valid_value_p (REGNO (base_reg), mode)
                           && reg_base_reg[REGNO (base_reg)] < 0
                           && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
                    {
                      offset = reg_offset[REGNO (base_reg)];
                      base_reg = XEXP (src, 1);
                    }
                  else
                    goto invalidate;
                }
              else
                goto invalidate;

              break;
            }

          goto invalidate;

        case REG:
          base_reg = src;
          offset = 0;
          break;

        case CONST_INT:
          /* Start tracking the register as a constant.  */
          reg_base_reg[regno] = -1;
          reg_symbol_ref[regno] = NULL_RTX;
          reg_offset[regno] = INTVAL (SET_SRC (set));
          /* We assign the same luid to all registers set to constants.  */
          reg_set_luid[regno] = move2add_last_label_luid + 1;
          move2add_record_mode (dst);
          return;

        default:
          goto invalidate;
        }

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
         up as a new base register, pretending its value is known
         starting from the current insn.  */
      if (!move2add_valid_value_p (base_regno, mode))
        {
          reg_base_reg[base_regno] = base_regno;
          reg_symbol_ref[base_regno] = NULL_RTX;
          reg_offset[base_regno] = 0;
          reg_set_luid[base_regno] = move2add_luid;
          gcc_assert (GET_MODE (base_reg) == mode);
          move2add_record_mode (base_reg);
        }

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno]
        = trunc_int_for_mode (offset + reg_offset[base_regno], mode);

      move2add_record_mode (dst);
    }
  else
    {
    invalidate:
      /* Invalidate the contents of the register.  */
      move2add_record_mode (dst);
      reg_mode[regno] = VOIDmode;
    }
}
namespace {

const pass_data pass_data_postreload_cse =
{
  RTL_PASS, /* type */
  "postreload", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_RELOAD_CSE_REGS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_postreload_cse : public rtl_opt_pass
{
public:
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }

  virtual unsigned int execute (function *);

}; // class pass_postreload_cse

unsigned int
pass_postreload_cse::execute (function *fun)
{
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (fun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_postreload_cse (gcc::context *ctxt)
{
  return new pass_postreload_cse (ctxt);
}