/* Post-reload compare elimination.
   Copyright (C) 2010-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* There is a set of targets whose general-purpose move or addition
   instructions clobber the flags.  These targets cannot split their
   CBRANCH/CSTORE etc patterns before reload is complete, lest reload
   itself insert these instructions in between the flags setter and user.
   Because these targets cannot split the compare from the use, they
   cannot make use of the comparison elimination offered by the combine pass.

   This is a small pass intended to provide comparison elimination similar to
   what was available via NOTICE_UPDATE_CC for cc0 targets.

   This pass assumes:

   (0) CBRANCH/CSTORE etc have been split in pass_split_after_reload.

   (1) All comparison patterns are represented as

	[(set (reg:CC) (compare:CC (reg) (reg_or_immediate)))]

   (2) All insn patterns that modify the flags are represented as

	[(set (reg) (operation))
	 (clobber (reg:CC))]

   (3) If an insn of form (2) can usefully set the flags, there is
       another pattern of the form

	[(set (reg:CCM) (compare:CCM (operation) (immediate)))
	 (set (reg) (operation))]

       The mode CCM will be chosen as if by SELECT_CC_MODE.

   Note that unlike NOTICE_UPDATE_CC, we do not handle memory operands.
   This could be handled as a future enhancement.  */
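
/* As a concrete sketch of the transformation (hard register 17 stands in
   for targetm.flags_regnum, and the CC modes are only illustrative; the
   real choice is made by SELECT_CC_MODE), a sequence such as

	(parallel [(set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))
		   (clobber (reg:CC 17))])
	(set (reg:CC 17) (compare:CC (reg:SI 0) (const_int 0)))

   can, when the target provides the matching flag-setting add pattern of
   form (3), be rewritten as the single insn

	(parallel [(set (reg:CC 17) (compare:CC (plus:SI (reg:SI 1)
							 (reg:SI 2))
						(const_int 0)))
		   (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))])

   with the separate compare deleted.  */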
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "tree-pass.h"
#include "domwalk.h"
/* These structures describe a comparison and how it is used.  */

/* The choice of maximum 3 uses comes from wanting to eliminate the two
   duplicate compares from a three-way branch on the sign of a value.
   This is also sufficient to eliminate the duplicate compare against the
   high-part of a double-word comparison.  */
#define MAX_CMP_USE 3
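
/* For instance, a three-way branch on the sign of X, roughly

	if (x < 0) ... else if (x > 0) ... else ...

   initially expands to three compares of X against zero; once the two
   duplicates are deleted, the surviving compare feeds up to three
   conditional jumps (LT, GT and EQ), hence three use slots.  */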
struct comparison_use
{
  /* The instruction in which the result of the compare is used.  */
  rtx_insn *insn;
  /* The location of the flags register within the use.  */
  rtx *loc;
  /* The comparison code applied against the flags register.  */
  enum rtx_code code;
};

struct comparison
{
  /* The comparison instruction.  */
  rtx_insn *insn;

  /* The insn prior to the comparison insn that clobbers the flags.  */
  rtx_insn *prev_clobber;

  /* The insn prior to the comparison insn that sets in_a REG.  */
  rtx_insn *in_a_setter;

  /* The two values being compared.  These will be either REGs or
     constants.  */
  rtx in_a, in_b;

  /* The REG_EH_REGION of the comparison.  */
  rtx eh_note;

  /* Information about how this comparison is used.  */
  struct comparison_use uses[MAX_CMP_USE];

  /* The original CC_MODE for this comparison.  */
  machine_mode orig_mode;

  /* The number of uses identified for this comparison.  */
  unsigned short n_uses;

  /* True if not all uses of this comparison have been identified.
     This can happen either for overflowing the array above, or if
     the flags register is used in some unusual context.  */
  bool missing_uses;

  /* True if its inputs are still valid at the end of the block.  */
  bool inputs_valid;

  /* Whether IN_A is wrapped in a NOT before being compared.  */
  bool not_in_a;
};

static vec<comparison *> all_compares;
/* Return whether X is a NOT unary expression.  */

static bool
is_not (rtx x)
{
  return GET_CODE (x) == NOT;
}

/* Strip a NOT unary expression around X, if any.  */

static rtx
strip_not (rtx x)
{
  if (is_not (x))
    return XEXP (x, 0);

  return x;
}

/* Look for a "conforming" comparison, as defined above.  If valid, return
   the rtx for the COMPARE itself.  */

static rtx
conforming_compare (rtx_insn *insn)
{
  rtx set, src, dest;

  set = single_set (insn);
  if (set == NULL)
    return NULL;

  src = SET_SRC (set);
  if (GET_CODE (src) != COMPARE)
    return NULL;

  dest = SET_DEST (set);
  if (!REG_P (dest) || REGNO (dest) != targetm.flags_regnum)
    return NULL;

  if (!REG_P (strip_not (XEXP (src, 0))))
    return NULL;

  if (CONSTANT_P (XEXP (src, 1)) || REG_P (XEXP (src, 1)))
    return src;

  if (GET_CODE (XEXP (src, 1)) == UNSPEC)
    {
      for (int i = 0; i < XVECLEN (XEXP (src, 1), 0); i++)
	if (!REG_P (XVECEXP (XEXP (src, 1), 0, i)))
	  return NULL;
      return src;
    }

  return NULL;
}
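
/* For illustration (with hard register 17 standing in for
   targetm.flags_regnum), compares such as

	(set (reg:CC 17) (compare:CC (reg:SI 0) (const_int 42)))
	(set (reg:CC 17) (compare:CC (not:SI (reg:SI 0)) (reg:SI 1)))

   are conforming, whereas a compare whose first operand is a MEM, or whose
   destination is not the flags register, is rejected.  */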
/* Look for a pattern of the "correct" form for an insn with a flags clobber
   for which we may be able to eliminate a compare later.  We're not looking
   to validate any inputs at this time, merely see that the basic shape is
   correct.  The term "arithmetic" may be somewhat misleading...  */

static bool
arithmetic_flags_clobber_p (rtx_insn *insn)
{
  rtx pat, x;

  if (!NONJUMP_INSN_P (insn))
    return false;
  pat = PATTERN (insn);
  if (asm_noperands (pat) >= 0)
    return false;

  if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) == 2)
    {
      x = XVECEXP (pat, 0, 0);
      if (GET_CODE (x) != SET)
	return false;

      x = XVECEXP (pat, 0, 1);
      if (GET_CODE (x) == CLOBBER)
	{
	  x = XEXP (x, 0);
	  if (REG_P (x) && REGNO (x) == targetm.flags_regnum)
	    return true;
	}
    }

  return false;
}
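
/* The accepted shape is the two-element PARALLEL of form (2) from the
   header comment, for example (register numbers illustrative)

	(parallel [(set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 1)))
		   (clobber (reg:CC 17))])

   where register 17 is the target's flags register.  */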
/* Look for uses of FLAGS in INSN.  If we find one we can analyze, record
   it in CMP; otherwise indicate that we've missed a use.  */

static void
find_flags_uses_in_insn (struct comparison *cmp, rtx_insn *insn)
{
  df_ref use;

  /* If we've already lost track of uses, don't bother collecting more.  */
  if (cmp->missing_uses)
    return;

  /* Find a USE of the flags register.  */
  FOR_EACH_INSN_USE (use, insn)
    if (DF_REF_REGNO (use) == targetm.flags_regnum)
      {
	rtx x, *loc;

	/* If this is an unusual use, quit.  */
	if (DF_REF_TYPE (use) != DF_REF_REG_USE)
	  goto fail;

	/* If we've run out of slots to record uses, quit.  */
	if (cmp->n_uses == MAX_CMP_USE)
	  goto fail;

	/* Unfortunately the location of the flags register, while present
	   in the reference structure, doesn't help.  We need to find the
	   comparison code that is outer to the actual flags use.  */
	loc = DF_REF_LOC (use);
	x = PATTERN (insn);
	if (GET_CODE (x) == PARALLEL)
	  x = XVECEXP (x, 0, 0);
	if (GET_CODE (x) == SET)
	  x = SET_SRC (x);
	if (GET_CODE (x) == IF_THEN_ELSE)
	  x = XEXP (x, 0);
	if (COMPARISON_P (x)
	    && loc == &XEXP (x, 0)
	    && XEXP (x, 1) == const0_rtx)
	  {
	    /* We've found a use of the flags that we understand.  */
	    struct comparison_use *cuse = &cmp->uses[cmp->n_uses++];
	    cuse->insn = insn;
	    cuse->loc = loc;
	    cuse->code = GET_CODE (x);
	  }
	else
	  goto fail;
      }
  return;

 fail:
  /* We failed to recognize this use of the flags register.  */
  cmp->missing_uses = true;
}
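
/* A use this function understands is, for example, a conditional branch
   testing the flags against zero (register number illustrative):

	(set (pc) (if_then_else (ge (reg:CC 17) (const_int 0))
				(label_ref 23)
				(pc)))

   Here the recorded use code would be GE.  */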
class find_comparison_dom_walker : public dom_walker
{
public:
  find_comparison_dom_walker (cdi_direction direction)
    : dom_walker (direction) {}

  edge before_dom_children (basic_block) final override;
};
/* Return true if conforming COMPARE with EH_NOTE is redundant with comparison
   CMP and can thus be eliminated.  */

static bool
can_eliminate_compare (rtx compare, rtx eh_note, struct comparison *cmp)
{
  /* Take care that it's in the same EH region.  */
  if (cfun->can_throw_non_call_exceptions
      && !rtx_equal_p (eh_note, cmp->eh_note))
    return false;

  /* Make sure the compare is redundant with the previous.  */
  if (!rtx_equal_p (strip_not (XEXP (compare, 0)), cmp->in_a)
      || !rtx_equal_p (XEXP (compare, 1), cmp->in_b))
    return false;

  if (is_not (XEXP (compare, 0)) != cmp->not_in_a)
    return false;

  /* New mode must be compatible with the previous compare mode.  */
  machine_mode new_mode
    = targetm.cc_modes_compatible (GET_MODE (compare), cmp->orig_mode);

  if (new_mode == VOIDmode)
    return false;

  if (cmp->orig_mode != new_mode)
    {
      /* Generate new comparison for substitution.  */
      rtx flags = gen_rtx_REG (new_mode, targetm.flags_regnum);
      rtx x = gen_rtx_COMPARE (new_mode, cmp->in_a, cmp->in_b);
      x = gen_rtx_SET (flags, x);

      if (!validate_change (cmp->insn, &PATTERN (cmp->insn), x, false))
	return false;

      cmp->orig_mode = new_mode;
    }

  return true;
}
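
/* E.g. in

	(set (reg:CC 17) (compare:CC (reg:SI 0) (reg:SI 1)))
	... insns that touch neither the flags nor regs 0 and 1 ...
	(set (reg:CC 17) (compare:CC (reg:SI 0) (reg:SI 1)))

   the second compare is redundant with the first and is deleted by the
   caller (register numbers illustrative).  */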
/* Identify comparison instructions within BB.  If the flags from the last
   compare in the BB are live at the end of the block, install the compare
   in BB->AUX.  Called via dom_walker.walk ().  */

edge
find_comparison_dom_walker::before_dom_children (basic_block bb)
{
  rtx_insn *insn, *next;
  bool need_purge = false;
  rtx_insn *last_setter[FIRST_PSEUDO_REGISTER];

  /* The last comparison that was made.  Will be reset to NULL
     once the flags are clobbered.  */
  struct comparison *last_cmp = NULL;

  /* True iff the last comparison has not been clobbered, nor
     have its inputs.  Used to eliminate duplicate compares.  */
  bool last_cmp_valid = false;

  /* The last insn that clobbered the flags, if that insn is of
     a form that may be valid for eliminating a following compare.
     To be reset to NULL once the flags are set otherwise.  */
  rtx_insn *last_clobber = NULL;

  /* Propagate the last live comparison throughout the extended basic block.  */
  if (single_pred_p (bb))
    {
      last_cmp = (struct comparison *) single_pred (bb)->aux;
      if (last_cmp)
	last_cmp_valid = last_cmp->inputs_valid;
    }

  memset (last_setter, 0, sizeof (last_setter));
  for (insn = BB_HEAD (bb); insn; insn = next)
    {
      rtx src;

      next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn));
      if (!NONDEBUG_INSN_P (insn))
	continue;

      src = conforming_compare (insn);
      if (src)
	{
	  rtx eh_note = NULL;

	  if (cfun->can_throw_non_call_exceptions)
	    eh_note = find_reg_note (insn, REG_EH_REGION, NULL);

	  if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp))
	    {
	      if (eh_note)
		need_purge = true;
	      delete_insn (insn);
	      continue;
	    }

	  last_cmp = XCNEW (struct comparison);
	  last_cmp->insn = insn;
	  last_cmp->prev_clobber = last_clobber;
	  last_cmp->in_a = strip_not (XEXP (src, 0));
	  last_cmp->in_b = XEXP (src, 1);
	  last_cmp->not_in_a = is_not (XEXP (src, 0));
	  last_cmp->eh_note = eh_note;
	  last_cmp->orig_mode = GET_MODE (src);
	  if (last_cmp->in_b == const0_rtx
	      && last_setter[REGNO (last_cmp->in_a)])
	    {
	      rtx set = single_set (last_setter[REGNO (last_cmp->in_a)]);
	      if (set && rtx_equal_p (SET_DEST (set), last_cmp->in_a))
		last_cmp->in_a_setter = last_setter[REGNO (last_cmp->in_a)];
	    }
	  all_compares.safe_push (last_cmp);

	  /* It's unusual, but be prepared for comparison patterns that
	     also clobber an input, or perhaps a scratch.  */
	  last_clobber = NULL;
	  last_cmp_valid = true;
	}
      else
	{
	  /* Notice if this instruction uses the flags register.  */
	  if (last_cmp)
	    find_flags_uses_in_insn (last_cmp, insn);

	  /* Notice if this instruction kills the flags register.  */
	  df_ref def;
	  FOR_EACH_INSN_DEF (def, insn)
	    if (DF_REF_REGNO (def) == targetm.flags_regnum)
	      {
		/* See if this insn could be the "clobber" that eliminates
		   a future comparison.  */
		last_clobber = (arithmetic_flags_clobber_p (insn)
				? insn : NULL);

		/* In either case, the previous compare is no longer valid.  */
		last_cmp = NULL;
		last_cmp_valid = false;
		break;
	      }
	}

      /* Notice if any of the inputs to the comparison have changed
	 and remember last insn that sets each register.  */
      df_ref def;
      FOR_EACH_INSN_DEF (def, insn)
	{
	  if (last_cmp_valid
	      && (DF_REF_REGNO (def) == REGNO (last_cmp->in_a)
		  || (REG_P (last_cmp->in_b)
		      && DF_REF_REGNO (def) == REGNO (last_cmp->in_b))))
	    last_cmp_valid = false;
	  last_setter[DF_REF_REGNO (def)] = insn;
	}
    }

  /* Remember the live comparison for subsequent members of
     the extended basic block.  */
  if (last_cmp)
    {
      bb->aux = last_cmp;
      last_cmp->inputs_valid = last_cmp_valid;

      /* Look to see if the flags register is live outgoing here, and
	 incoming to any successor not part of the extended basic block.  */
      if (bitmap_bit_p (df_get_live_out (bb), targetm.flags_regnum))
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      basic_block dest = e->dest;
	      if (bitmap_bit_p (df_get_live_in (bb), targetm.flags_regnum)
		  && !single_pred_p (dest))
		{
		  last_cmp->missing_uses = true;
		  break;
		}
	    }
	}
    }

  /* If we deleted a compare with a REG_EH_REGION note, we may need to
     remove EH edges.  */
  if (need_purge)
    purge_dead_edges (bb);

  return NULL;
}
/* Find all comparisons in the function.  */

static void
find_comparisons (void)
{
  calculate_dominance_info (CDI_DOMINATORS);

  find_comparison_dom_walker (CDI_DOMINATORS)
    .walk (cfun->cfg->x_entry_block_ptr);

  clear_aux_for_blocks ();
  free_dominance_info (CDI_DOMINATORS);
}
/* Select an alternate CC_MODE for a comparison insn comparing A and B.
   Note that inputs are almost certainly different from the IN_A and IN_B
   stored in CMP -- we're called while attempting to eliminate the compare
   after all.  Return the new FLAGS rtx if successful, else return NULL.
   Note that this function may start a change group.  */

static rtx
maybe_select_cc_mode (struct comparison *cmp, rtx a ATTRIBUTE_UNUSED,
		      rtx b ATTRIBUTE_UNUSED)
{
  machine_mode sel_mode;
  const int n = cmp->n_uses;
  rtx flags = NULL;

#ifndef SELECT_CC_MODE
  /* Minimize code differences when this target macro is undefined.  */
#define SELECT_CC_MODE(A,B,C) (gcc_unreachable (), VOIDmode)
#endif

  /* If we don't have access to all of the uses, we can't validate.  */
  if (cmp->missing_uses || n == 0)
    return NULL;

  /* Find a new mode that works for all of the uses.  Special case the
     common case of exactly one use.  */
  if (n == 1)
    {
      sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
      if (sel_mode != cmp->orig_mode)
	{
	  flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
	  validate_change (cmp->uses[0].insn, cmp->uses[0].loc, flags, true);
	}
    }
  else
    {
      int i;

      sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
      for (i = 1; i < n; ++i)
	{
	  machine_mode new_mode = SELECT_CC_MODE (cmp->uses[i].code, a, b);
	  if (new_mode != sel_mode)
	    {
	      sel_mode = targetm.cc_modes_compatible (sel_mode, new_mode);
	      if (sel_mode == VOIDmode)
		return NULL;
	    }
	}

      if (sel_mode != cmp->orig_mode)
	{
	  flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
	  for (i = 0; i < n; ++i)
	    validate_change (cmp->uses[i].insn, cmp->uses[i].loc, flags, true);
	}
    }

  return flags;
}
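
/* As an example of the mode negotiation above: if the only recorded use of
   the flags is an EQ test, a target's SELECT_CC_MODE may return a mode such
   as CCZmode that guarantees just the zero flag; if another use needs LT,
   the two requested modes are reconciled through targetm.cc_modes_compatible,
   and the whole attempt is abandoned when no common mode exists.  (Mode names
   are target-specific; CCZmode is merely a conventional example.)  */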
/* Return a register RTX holding the same value at START as REG at END, or
   NULL_RTX if there is none.  */

static rtx
equivalent_reg_at_start (rtx reg, rtx_insn *end, rtx_insn *start)
{
  machine_mode orig_mode = GET_MODE (reg);
  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (end));

  for (rtx_insn *insn = PREV_INSN (end);
       insn != start;
       insn = PREV_INSN (insn))
    {
      const int abnormal_flags
	= (DF_REF_CONDITIONAL | DF_REF_PARTIAL | DF_REF_MAY_CLOBBER
	   | DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT
	   | DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART
	   | DF_REF_PRE_POST_MODIFY);
      df_ref def;

      /* Note that the BB_HEAD is always either a note or a label, but in
	 any case it means that REG is defined outside the block.  */
      if (insn == bb_head)
	return NULL_RTX;
      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
	continue;

      /* Find a possible def of REG in INSN.  */
      FOR_EACH_INSN_DEF (def, insn)
	if (DF_REF_REGNO (def) == REGNO (reg))
	  break;

      /* No definitions of REG; continue searching.  */
      if (def == NULL)
	continue;

      /* Bail if this is not a totally normal set of REG.  */
      if (DF_REF_IS_ARTIFICIAL (def))
	return NULL_RTX;
      if (DF_REF_FLAGS (def) & abnormal_flags)
	return NULL_RTX;

      /* We've found an insn between the compare and the clobber that sets
	 REG.  Given that pass_cprop_hardreg has not yet run, we still find
	 situations in which we can usefully look through a copy insn.  */
      rtx x = single_set (insn);
      if (x == NULL_RTX)
	return NULL_RTX;

      reg = SET_SRC (x);
      if (!REG_P (reg))
	return NULL_RTX;
    }

  if (GET_MODE (reg) != orig_mode)
    return NULL_RTX;

  return reg;
}
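
/* For example, with (register numbers illustrative)

	I1: (parallel [(set (reg:SI 1) (plus:SI (reg:SI 2) (reg:SI 3)))
		       (clobber (reg:CC 17))])
	I2: (set (reg:SI 4) (reg:SI 1))
	I3: (set (reg:CC 17) (compare:CC (reg:SI 4) (const_int 0)))

   equivalent_reg_at_start (reg 4, I3, I1) walks backwards from I3, looks
   through the copy in I2, and returns reg 1 -- the register I1 actually
   computes.  */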
/* Return true if it is okay to merge the comparison CMP_INSN with
   the instruction ARITH_INSN.  Both instructions are assumed to be in the
   same basic block with ARITH_INSN appearing before CMP_INSN.  This checks
   that there are no uses or defs of the condition flags or control flow
   changes between the two instructions.  */

static bool
can_merge_compare_into_arith (rtx_insn *cmp_insn, rtx_insn *arith_insn)
{
  for (rtx_insn *insn = PREV_INSN (cmp_insn);
       insn && insn != arith_insn;
       insn = PREV_INSN (insn))
    {
      if (!NONDEBUG_INSN_P (insn))
	continue;
      /* Bail if there are jumps or calls in between.  */
      if (!NONJUMP_INSN_P (insn))
	return false;

      /* Bail on old-style asm statements because they lack
	 data flow information.  */
      if (GET_CODE (PATTERN (insn)) == ASM_INPUT)
	return false;

      df_ref ref;
      /* Find a USE of the flags register.  */
      FOR_EACH_INSN_USE (ref, insn)
	if (DF_REF_REGNO (ref) == targetm.flags_regnum)
	  return false;

      /* Find a DEF of the flags register.  */
      FOR_EACH_INSN_DEF (ref, insn)
	if (DF_REF_REGNO (ref) == targetm.flags_regnum)
	  return false;
    }
  return true;
}
/* Given two SET expressions, SET_A and SET_B determine whether they form
   a recognizable pattern when emitted in parallel.  Return that parallel
   if so.  Otherwise return NULL.  */

static rtx
try_validate_parallel (rtx set_a, rtx set_b)
{
  rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set_a, set_b));
  rtx_insn *insn = make_insn_raw (par);

  if (insn_invalid_p (insn, false))
    {
      crtl->emit.x_cur_insn_uid--;
      return NULL_RTX;
    }

  SET_PREV_INSN (insn) = NULL_RTX;
  SET_NEXT_INSN (insn) = NULL_RTX;
  INSN_LOCATION (insn) = 0;
  return par;
}
/* For a comparison instruction described by CMP check if it compares a
   register with zero, i.e. it is of the form CC := CMP R1, 0.

   If it is, find the instruction defining R1 (say I1) and try to create a
   PARALLEL consisting of I1 and the comparison, representing a flag-setting
   arithmetic instruction.  Example:
   I1: R1 := R2 + R3
   <instructions that don't read the condition register>
   I2: CC := CMP R1, 0
   I2 can be merged with I1 into:
   I1: { CC := CMP (R2 + R3) 0 ; R1 := R2 + R3 }
   This catches cases where R1 is used between I1 and I2 and therefore
   combine and other RTL optimisations will not try to propagate it into
   I2.  Return true if we succeeded in merging CMP.  */

static bool
try_merge_compare (struct comparison *cmp)
{
  rtx_insn *cmp_insn = cmp->insn;

  if (cmp->in_b != const0_rtx || cmp->in_a_setter == NULL)
    return false;
  rtx in_a = cmp->in_a;
  df_ref use;

  FOR_EACH_INSN_USE (use, cmp_insn)
    if (DF_REF_REGNO (use) == REGNO (in_a))
      break;
  if (!use)
    return false;

  rtx_insn *def_insn = cmp->in_a_setter;
  rtx set = single_set (def_insn);
  if (!set)
    return false;

  if (!can_merge_compare_into_arith (cmp_insn, def_insn))
    return false;

  rtx src = SET_SRC (set);

  /* If the source uses addressing modes with side effects, we can't
     do the merge because we'd end up with a PARALLEL that has two
     instances of that side effect in it.  */
  if (side_effects_p (src))
    return false;

  rtx flags = maybe_select_cc_mode (cmp, src, CONST0_RTX (GET_MODE (src)));
  if (!flags)
    {
      /* We may already have a change group going through
	 maybe_select_cc_mode.  Discard it properly.  */
      cancel_changes (0);
      return false;
    }

  rtx flag_set
    = gen_rtx_SET (flags, gen_rtx_COMPARE (GET_MODE (flags),
					   copy_rtx (src),
					   CONST0_RTX (GET_MODE (src))));
  rtx arith_set = copy_rtx (PATTERN (def_insn));
  rtx par = try_validate_parallel (flag_set, arith_set);
  if (!par)
    {
      /* We may already have a change group going through
	 maybe_select_cc_mode.  Discard it properly.  */
      cancel_changes (0);
      return false;
    }

  if (!apply_change_group ())
    return false;

  emit_insn_after (par, def_insn);
  delete_insn (def_insn);
  delete_insn (cmp->insn);
  return true;
}
/* Attempt to replace a comparison with a prior arithmetic insn that can
   compute the same flags value as the comparison itself.  Return true if
   successful, having made all rtl modifications necessary.  */

static bool
try_eliminate_compare (struct comparison *cmp)
{
  rtx flags, in_a, in_b, cmp_a, cmp_b;

  if (try_merge_compare (cmp))
    return true;

  /* We must have found an interesting "clobber" preceding the compare.  */
  if (cmp->prev_clobber == NULL)
    return false;

  /* Verify that IN_A is not clobbered in between CMP and PREV_CLOBBER.
     Given that this target requires this pass, we can assume that most
     insns do clobber the flags, and so the distance between the compare
     and the clobber is likely to be small.  */
  /* ??? This is one point at which one could argue that DF_REF_CHAIN would
     be useful, but it is thought to be too heavy-weight a solution here.  */
  in_a = equivalent_reg_at_start (cmp->in_a, cmp->insn, cmp->prev_clobber);
  if (!in_a)
    return false;

  /* Likewise for IN_B if need be.  */
  if (CONSTANT_P (cmp->in_b))
    in_b = cmp->in_b;
  else if (REG_P (cmp->in_b))
    {
      in_b = equivalent_reg_at_start (cmp->in_b, cmp->insn, cmp->prev_clobber);
      if (!in_b)
	return false;
    }
  else if (GET_CODE (cmp->in_b) == UNSPEC)
    {
      const int len = XVECLEN (cmp->in_b, 0);
      rtvec v = rtvec_alloc (len);
      for (int i = 0; i < len; i++)
	{
	  rtx r = equivalent_reg_at_start (XVECEXP (cmp->in_b, 0, i),
					   cmp->insn, cmp->prev_clobber);
	  if (!r)
	    return false;
	  RTVEC_ELT (v, i) = r;
	}
      in_b = gen_rtx_UNSPEC (GET_MODE (cmp->in_b), v, XINT (cmp->in_b, 1));
    }
  else
    gcc_unreachable ();

  /* We've reached PREV_CLOBBER without finding a modification of IN_A.
     Validate that PREV_CLOBBER itself does in fact refer to IN_A.  Do
     recall that we've already validated the shape of PREV_CLOBBER.  */
  rtx_insn *insn = cmp->prev_clobber;

  rtx x = XVECEXP (PATTERN (insn), 0, 0);
  if (rtx_equal_p (SET_DEST (x), in_a))
    cmp_a = in_a;

  /* Also check operations with implicit extensions, e.g.:
     [(set (reg:DI)
	   (zero_extend:DI (plus:SI (reg:SI) (reg:SI))))
      (set (reg:CCZ flags)
	   (compare:CCZ (plus:SI (reg:SI) (reg:SI))
			(const_int 0)))]  */
  else if (REG_P (SET_DEST (x))
	   && REGNO (SET_DEST (x)) == REGNO (in_a)
	   && (GET_CODE (SET_SRC (x)) == ZERO_EXTEND
	       || GET_CODE (SET_SRC (x)) == SIGN_EXTEND)
	   && GET_MODE (XEXP (SET_SRC (x), 0)) == GET_MODE (in_a))
    cmp_a = XEXP (SET_SRC (x), 0);

  /* Also check fully redundant comparisons, e.g.:
     [(set (reg:SI)
	   (minus:SI (reg:SI) (reg:SI)))
      (set (reg:CC flags)
	   (compare:CC (reg:SI) (reg:SI)))]  */
  else if (REG_P (in_b)
	   && GET_CODE (SET_SRC (x)) == MINUS
	   && rtx_equal_p (XEXP (SET_SRC (x), 0), in_a)
	   && rtx_equal_p (XEXP (SET_SRC (x), 1), in_b))
    cmp_a = in_a;

  else
    return false;

  /* If the source uses addressing modes with side effects, we can't
     do the merge because we'd end up with a PARALLEL that has two
     instances of that side effect in it.  */
  if (side_effects_p (cmp_a))
    return false;

  if (in_a == in_b)
    cmp_b = cmp_a;
  else if (rtx_equal_p (SET_DEST (x), in_b))
    cmp_b = SET_SRC (x);
  else
    cmp_b = in_b;
  if (side_effects_p (cmp_b))
    return false;

  /* Determine if we ought to use a different CC_MODE here.  */
  flags = maybe_select_cc_mode (cmp, cmp_a, cmp_b);
  if (flags == NULL)
    flags = gen_rtx_REG (cmp->orig_mode, targetm.flags_regnum);

  /* Generate a new comparison for installation in the setter.  */
  rtx y = cmp->not_in_a
	  ? gen_rtx_NOT (GET_MODE (cmp_a), copy_rtx (cmp_a))
	  : copy_rtx (cmp_a);
  y = gen_rtx_COMPARE (GET_MODE (flags), y, copy_rtx (cmp_b));
  y = gen_rtx_SET (flags, y);

  /* Canonicalize instruction to:
     [(set (reg:CCM) (compare:CCM (operation) (immediate)))
      (set (reg) (operation))]  */

  rtvec v = rtvec_alloc (2);
  RTVEC_ELT (v, 0) = y;
  RTVEC_ELT (v, 1) = x;

  rtx pat = gen_rtx_PARALLEL (VOIDmode, v);

  /* Succeed if the new instruction is valid.  Note that we may have started
     a change group within maybe_select_cc_mode, therefore we must continue.  */
  validate_change (insn, &PATTERN (insn), pat, true);

  if (!apply_change_group ())
    return false;

  /* Success.  Delete the compare insn...  */
  delete_insn (cmp->insn);

  /* ... and any notes that are now invalid due to multiple sets.  */
  x = find_regno_note (insn, REG_UNUSED, targetm.flags_regnum);
  if (x)
    remove_note (insn, x);
  x = find_reg_note (insn, REG_EQUAL, NULL);
  if (x)
    remove_note (insn, x);
  x = find_reg_note (insn, REG_EQUIV, NULL);
  if (x)
    remove_note (insn, x);

  return true;
}
/* Main entry point to the pass.  */

static unsigned int
execute_compare_elim_after_reload (void)
{
  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();

  gcc_checking_assert (!all_compares.exists ());

  /* Locate all comparisons and their uses, and eliminate duplicates.  */
  find_comparisons ();
  if (all_compares.exists ())
    {
      struct comparison *cmp;
      size_t i;

      /* Eliminate comparisons that are redundant with flags computation.  */
      FOR_EACH_VEC_ELT (all_compares, i, cmp)
	{
	  try_eliminate_compare (cmp);
	  XDELETE (cmp);
	}

      all_compares.release ();
    }

  return 0;
}
namespace {

const pass_data pass_data_compare_elim_after_reload =
{
  RTL_PASS, /* type */
  "cmpelim", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_finish | TODO_df_verify ), /* todo_flags_finish */
};

class pass_compare_elim_after_reload : public rtl_opt_pass
{
public:
  pass_compare_elim_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_compare_elim_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
    {
      /* Setting this target hook value is how a backend indicates the need.  */
      if (targetm.flags_regnum == INVALID_REGNUM)
	return false;

      return flag_compare_elim_after_reload;
    }

  unsigned int execute (function *) final override
    {
      return execute_compare_elim_after_reload ();
    }

}; // class pass_compare_elim_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_compare_elim_after_reload (gcc::context *ctxt)
{
  return new pass_compare_elim_after_reload (ctxt);
}