/* Post-reload compare elimination.
   Copyright (C) 2010-2023 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* There is a set of targets whose general-purpose move or addition
   instructions clobber the flags.  These targets cannot split their
   CBRANCH/CSTORE etc patterns before reload is complete, lest reload
   itself insert these instructions in between the flags setter and user.
   Because these targets cannot split the compare from the use, they
   cannot make use of the comparison elimination offered by the combine pass.

   This is a small pass intended to provide comparison elimination similar to
   what was available via NOTICE_UPDATE_CC for cc0 targets.

   This pass assumes:

   (0) CBRANCH/CSTORE etc have been split in pass_split_after_reload.

   (1) All comparison patterns are represented as

	[(set (reg:CC) (compare:CC (reg) (reg_or_immediate)))]

   (2) All insn patterns that modify the flags are represented as

	[(set (reg) (operation)
	 (clobber (reg:CC))]

   (3) If an insn of form (2) can usefully set the flags, there is
       another pattern of the form

	[(set (reg:CCM) (compare:CCM (operation) (immediate)))
	 (set (reg) (operation)]

       The mode CCM will be chosen as if by SELECT_CC_MODE.

   Note that unlike NOTICE_UPDATE_CC, we do not handle memory operands.
   This could be handled as a future enhancement.  */

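/* As an illustration only (a hypothetical target; register numbers and CC
   modes are chosen arbitrarily), the transformation performed here turns

	insn 1:  [(set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))
		  (clobber (reg:CC flags))]
	...
	insn 2:  [(set (reg:CC flags) (compare:CC (reg:SI 0) (const_int 0)))]

   into a single flag-setting arithmetic insn of form (3),

	insn 1':  [(set (reg:CCZ flags)
			(compare:CCZ (plus:SI (reg:SI 1) (reg:SI 2))
				     (const_int 0)))
		   (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))]

   and deletes insn 2, provided the target recognizes the combined
   pattern.  */
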
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "tree-pass.h"
#include "domwalk.h"

/* These structures describe a comparison and how it is used.  */

/* The choice of maximum 3 uses comes from wanting to eliminate the two
   duplicate compares from a three-way branch on the sign of a value.
   This is also sufficient to eliminate the duplicate compare against the
   high-part of a double-word comparison.  */
#define MAX_CMP_USE 3

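/* For example (a C-level sketch, not tied to any particular target), a
   three-way branch such as

     if (x < 0) f (); else if (x > 0) g (); else h ();

   tends to expand to one compare of X against zero per test; once the
   first compare has set the flags, the later ones are the duplicates this
   pass tries to remove.  */
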
struct comparison_use
{
  /* The instruction in which the result of the compare is used.  */
  rtx_insn *insn;
  /* The location of the flags register within the use.  */
  rtx *loc;
  /* The comparison code applied against the flags register.  */
  enum rtx_code code;
};

struct comparison
{
  /* The comparison instruction.  */
  rtx_insn *insn;

  /* The insn prior to the comparison insn that clobbers the flags.  */
  rtx_insn *prev_clobber;

  /* The insn prior to the comparison insn that sets in_a REG.  */
  rtx_insn *in_a_setter;

  /* The two values being compared.  These will be either REGs or
     constants.  */
  rtx in_a, in_b;

  /* The REG_EH_REGION of the comparison.  */
  rtx eh_note;

  /* Information about how this comparison is used.  */
  struct comparison_use uses[MAX_CMP_USE];

  /* The original CC_MODE for this comparison.  */
  machine_mode orig_mode;

  /* The number of uses identified for this comparison.  */
  unsigned short n_uses;

  /* True if not all uses of this comparison have been identified.
     This can happen either for overflowing the array above, or if
     the flags register is used in some unusual context.  */
  bool missing_uses;

  /* True if its inputs are still valid at the end of the block.  */
  bool inputs_valid;

  /* Whether IN_A is wrapped in a NOT before being compared.  */
  bool not_in_a;
};

static vec<comparison *> all_compares;

/* Return whether X is a NOT unary expression.  */

static bool
is_not (rtx x)
{
  return GET_CODE (x) == NOT;
}

/* Strip a NOT unary expression around X, if any.  */

static rtx
strip_not (rtx x)
{
  if (is_not (x))
    return XEXP (x, 0);

  return x;
}

/* Look for a "conforming" comparison, as defined above.  If valid, return
   the rtx for the COMPARE itself.  */

static rtx
conforming_compare (rtx_insn *insn)
{
  rtx set, src, dest;

  set = single_set (insn);
  if (set == NULL)
    return NULL;

  src = SET_SRC (set);
  if (GET_CODE (src) != COMPARE)
    return NULL;

  dest = SET_DEST (set);
  if (!REG_P (dest) || REGNO (dest) != targetm.flags_regnum)
    return NULL;

  if (!REG_P (strip_not (XEXP (src, 0))))
    return NULL;

  if (CONSTANT_P (XEXP (src, 1)) || REG_P (XEXP (src, 1)))
    return src;

  if (GET_CODE (XEXP (src, 1)) == UNSPEC)
    {
      for (int i = 0; i < XVECLEN (XEXP (src, 1), 0); i++)
	if (!REG_P (XVECEXP (XEXP (src, 1), 0, i)))
	  return NULL;
      return src;
    }

  return NULL;
}

/* Look for a pattern of the "correct" form for an insn with a flags clobber
   for which we may be able to eliminate a compare later.  We're not looking
   to validate any inputs at this time, merely see that the basic shape is
   correct.  The term "arithmetic" may be somewhat misleading...  */

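/* Concretely (shape only; registers are arbitrary), the insns accepted here
   match form (2) from the head of the file, e.g.

	[(set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 1)))
	 (clobber (reg:CC flags))]

   i.e. a two-element PARALLEL whose first element is a SET of a REG and
   whose second element is a CLOBBER of the flags register.  */
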
static bool
arithmetic_flags_clobber_p (rtx_insn *insn)
{
  rtx pat, x;

  if (!NONJUMP_INSN_P (insn))
    return false;
  pat = PATTERN (insn);
  if (asm_noperands (pat) >= 0)
    return false;

  if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) == 2)
    {
      x = XVECEXP (pat, 0, 0);
      if (GET_CODE (x) != SET)
	return false;
      x = SET_DEST (x);
      if (!REG_P (x))
	return false;

      x = XVECEXP (pat, 0, 1);
      if (GET_CODE (x) == CLOBBER)
	{
	  x = XEXP (x, 0);
	  if (REG_P (x) && REGNO (x) == targetm.flags_regnum)
	    return true;
	}
    }

  return false;
}

/* Look for uses of FLAGS in INSN.  If we find one we can analyze, record
   it in CMP; otherwise indicate that we've missed a use.  */

static void
find_flags_uses_in_insn (struct comparison *cmp, rtx_insn *insn)
{
  df_ref use;

  /* If we've already lost track of uses, don't bother collecting more.  */
  if (cmp->missing_uses)
    return;

  /* Find a USE of the flags register.  */
  FOR_EACH_INSN_USE (use, insn)
    if (DF_REF_REGNO (use) == targetm.flags_regnum)
      {
	rtx x, *loc;

	/* If this is an unusual use, quit.  */
	if (DF_REF_TYPE (use) != DF_REF_REG_USE)
	  goto fail;

	/* If we've run out of slots to record uses, quit.  */
	if (cmp->n_uses == MAX_CMP_USE)
	  goto fail;

	/* Unfortunately the location of the flags register, while present
	   in the reference structure, doesn't help.  We need to find the
	   comparison code that is outer to the actual flags use.  */
	loc = DF_REF_LOC (use);
	x = PATTERN (insn);
	if (GET_CODE (x) == PARALLEL)
	  x = XVECEXP (x, 0, 0);
	if (GET_CODE (x) == SET)
	  x = SET_SRC (x);
	if (GET_CODE (x) == IF_THEN_ELSE)
	  x = XEXP (x, 0);
	if (COMPARISON_P (x)
	    && loc == &XEXP (x, 0)
	    && XEXP (x, 1) == const0_rtx)
	  {
	    /* We've found a use of the flags that we understand.  */
	    struct comparison_use *cuse = &cmp->uses[cmp->n_uses++];
	    cuse->insn = insn;
	    cuse->loc = loc;
	    cuse->code = GET_CODE (x);
	  }
	else
	  goto fail;
      }
  return;

 fail:
  /* We failed to recognize this use of the flags register.  */
  cmp->missing_uses = true;
}

class find_comparison_dom_walker : public dom_walker
{
public:
  find_comparison_dom_walker (cdi_direction direction)
    : dom_walker (direction) {}

  edge before_dom_children (basic_block) final override;
};

/* Return true if conforming COMPARE with EH_NOTE is redundant with comparison
   CMP and can thus be eliminated.  */

static bool
can_eliminate_compare (rtx compare, rtx eh_note, struct comparison *cmp)
{
  /* Take care that it's in the same EH region.  */
  if (cfun->can_throw_non_call_exceptions
      && !rtx_equal_p (eh_note, cmp->eh_note))
    return false;

  /* Make sure the compare is redundant with the previous.  */
  if (!rtx_equal_p (strip_not (XEXP (compare, 0)), cmp->in_a)
      || !rtx_equal_p (XEXP (compare, 1), cmp->in_b))
    return false;

  if (is_not (XEXP (compare, 0)) != cmp->not_in_a)
    return false;

  /* New mode must be compatible with the previous compare mode.  */
  machine_mode new_mode
    = targetm.cc_modes_compatible (GET_MODE (compare), cmp->orig_mode);

  if (new_mode == VOIDmode)
    return false;

  if (cmp->orig_mode != new_mode)
    {
      /* Generate new comparison for substitution.  */
      rtx flags = gen_rtx_REG (new_mode, targetm.flags_regnum);
      rtx x = gen_rtx_COMPARE (new_mode, cmp->in_a, cmp->in_b);
      x = gen_rtx_SET (flags, x);

      if (!validate_change (cmp->insn, &PATTERN (cmp->insn), x, false))
	return false;

      cmp->orig_mode = new_mode;
    }

  return true;
}

/* Identify comparison instructions within BB.  If the flags from the last
   compare in the BB is live at the end of the block, install the compare
   in BB->AUX.  Called via dom_walker.walk ().  */

edge
find_comparison_dom_walker::before_dom_children (basic_block bb)
{
  rtx_insn *insn, *next;
  bool need_purge = false;
  rtx_insn *last_setter[FIRST_PSEUDO_REGISTER];

  /* The last comparison that was made.  Will be reset to NULL
     once the flags are clobbered.  */
  struct comparison *last_cmp = NULL;

  /* True iff the last comparison has not been clobbered, nor
     have its inputs.  Used to eliminate duplicate compares.  */
  bool last_cmp_valid = false;

  /* The last insn that clobbered the flags, if that insn is of
     a form that may be valid for eliminating a following compare.
     To be reset to NULL once the flags are set otherwise.  */
  rtx_insn *last_clobber = NULL;

  /* Propagate the last live comparison throughout the extended basic block.  */
  if (single_pred_p (bb))
    {
      last_cmp = (struct comparison *) single_pred (bb)->aux;
      if (last_cmp)
	last_cmp_valid = last_cmp->inputs_valid;
    }

  memset (last_setter, 0, sizeof (last_setter));
  for (insn = BB_HEAD (bb); insn; insn = next)
    {
      rtx src;

      next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn));
      if (!NONDEBUG_INSN_P (insn))
	continue;

      src = conforming_compare (insn);
      if (src)
	{
	  rtx eh_note = NULL;

	  if (cfun->can_throw_non_call_exceptions)
	    eh_note = find_reg_note (insn, REG_EH_REGION, NULL);

	  if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp))
	    {
	      if (eh_note)
		need_purge = true;
	      delete_insn (insn);
	      continue;
	    }

	  last_cmp = XCNEW (struct comparison);
	  last_cmp->insn = insn;
	  last_cmp->prev_clobber = last_clobber;
	  last_cmp->in_a = strip_not (XEXP (src, 0));
	  last_cmp->in_b = XEXP (src, 1);
	  last_cmp->not_in_a = is_not (XEXP (src, 0));
	  last_cmp->eh_note = eh_note;
	  last_cmp->orig_mode = GET_MODE (src);
	  if (last_cmp->in_b == const0_rtx
	      && last_setter[REGNO (last_cmp->in_a)])
	    {
	      rtx set = single_set (last_setter[REGNO (last_cmp->in_a)]);
	      if (set && rtx_equal_p (SET_DEST (set), last_cmp->in_a))
		last_cmp->in_a_setter = last_setter[REGNO (last_cmp->in_a)];
	    }
	  all_compares.safe_push (last_cmp);

	  /* It's unusual, but be prepared for comparison patterns that
	     also clobber an input, or perhaps a scratch.  */
	  last_clobber = NULL;
	  last_cmp_valid = true;
	}

      else
	{
	  /* Notice if this instruction uses the flags register.  */
	  if (last_cmp)
	    find_flags_uses_in_insn (last_cmp, insn);

	  /* Notice if this instruction kills the flags register.  */
	  df_ref def;
	  FOR_EACH_INSN_DEF (def, insn)
	    if (DF_REF_REGNO (def) == targetm.flags_regnum)
	      {
		/* See if this insn could be the "clobber" that eliminates
		   a future comparison.  */
		last_clobber = (arithmetic_flags_clobber_p (insn)
				? insn : NULL);

		/* In either case, the previous compare is no longer valid.  */
		last_cmp = NULL;
		last_cmp_valid = false;
		break;
	      }
	}

      /* Notice if any of the inputs to the comparison have changed
	 and remember last insn that sets each register.  */
      df_ref def;
      FOR_EACH_INSN_DEF (def, insn)
	{
	  if (last_cmp_valid
	      && (DF_REF_REGNO (def) == REGNO (last_cmp->in_a)
		  || (REG_P (last_cmp->in_b)
		      && DF_REF_REGNO (def) == REGNO (last_cmp->in_b))))
	    last_cmp_valid = false;
	  last_setter[DF_REF_REGNO (def)] = insn;
	}
    }

  /* Remember the live comparison for subsequent members of
     the extended basic block.  */
  if (last_cmp)
    {
      bb->aux = last_cmp;
      last_cmp->inputs_valid = last_cmp_valid;

      /* Look to see if the flags register is live outgoing here, and
	 incoming to any successor not part of the extended basic block.  */
      if (bitmap_bit_p (df_get_live_out (bb), targetm.flags_regnum))
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      basic_block dest = e->dest;
	      if (bitmap_bit_p (df_get_live_in (bb), targetm.flags_regnum)
		  && !single_pred_p (dest))
		{
		  last_cmp->missing_uses = true;
		  break;
		}
	    }
	}
    }

  /* If we deleted a compare with a REG_EH_REGION note, we may need to
     remove EH edges.  */
  if (need_purge)
    purge_dead_edges (bb);

  return NULL;
}

/* Find all comparisons in the function.  */

static void
find_comparisons (void)
{
  calculate_dominance_info (CDI_DOMINATORS);

  find_comparison_dom_walker (CDI_DOMINATORS)
    .walk (cfun->cfg->x_entry_block_ptr);

  clear_aux_for_blocks ();
  free_dominance_info (CDI_DOMINATORS);
}

/* Select an alternate CC_MODE for a comparison insn comparing A and B.
   Note that inputs are almost certainly different than the IN_A and IN_B
   stored in CMP -- we're called while attempting to eliminate the compare
   after all.  Return the new FLAGS rtx if successful, else return NULL.
   Note that this function may start a change group.  */

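/* As a sketch of what the target macro provides (the modes named here are
   illustrative; the actual set is target-defined): SELECT_CC_MODE (EQ, a, b)
   might return CCZmode when only the zero flag is needed, while
   SELECT_CC_MODE (LT, a, b) might return a richer CCmode.  When the uses
   disagree, targetm.cc_modes_compatible picks a mode that serves them all,
   or VOIDmode if none exists.  */
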
static rtx
maybe_select_cc_mode (struct comparison *cmp, rtx a ATTRIBUTE_UNUSED,
		      rtx b ATTRIBUTE_UNUSED)
{
  machine_mode sel_mode;
  const int n = cmp->n_uses;
  rtx flags = NULL;

#ifndef SELECT_CC_MODE
  /* Minimize code differences when this target macro is undefined.  */
  return NULL;
#define SELECT_CC_MODE(A,B,C) (gcc_unreachable (), VOIDmode)
#endif

  /* If we don't have access to all of the uses, we can't validate.  */
  if (cmp->missing_uses || n == 0)
    return NULL;

  /* Find a new mode that works for all of the uses.  Special case the
     common case of exactly one use.  */
  if (n == 1)
    {
      sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
      if (sel_mode != cmp->orig_mode)
	{
	  flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
	  validate_change (cmp->uses[0].insn, cmp->uses[0].loc, flags, true);
	}
    }
  else
    {
      int i;

      sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
      for (i = 1; i < n; ++i)
	{
	  machine_mode new_mode = SELECT_CC_MODE (cmp->uses[i].code, a, b);
	  if (new_mode != sel_mode)
	    {
	      sel_mode = targetm.cc_modes_compatible (sel_mode, new_mode);
	      if (sel_mode == VOIDmode)
		return NULL;
	    }
	}

      if (sel_mode != cmp->orig_mode)
	{
	  flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
	  for (i = 0; i < n; ++i)
	    validate_change (cmp->uses[i].insn, cmp->uses[i].loc, flags, true);
	}
    }

  return flags;
}

/* Return a register RTX holding the same value at START as REG at END, or
   NULL_RTX if there is none.  */

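/* For instance (illustrative), if the only write to REG between START and
   END is a plain register copy "REG = REG2", and REG2 itself is not
   modified in that range, the scan below looks through the copy and
   returns REG2, since REG2 still holds the compared value at START.  */
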
static rtx
equivalent_reg_at_start (rtx reg, rtx_insn *end, rtx_insn *start)
{
  machine_mode orig_mode = GET_MODE (reg);
  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (end));

  for (rtx_insn *insn = PREV_INSN (end);
       insn != start;
       insn = PREV_INSN (insn))
    {
      const int abnormal_flags
	= (DF_REF_CONDITIONAL | DF_REF_PARTIAL | DF_REF_MAY_CLOBBER
	   | DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT
	   | DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART
	   | DF_REF_PRE_POST_MODIFY);
      df_ref def;

      /* Note that the BB_HEAD is always either a note or a label, but in
	 any case it means that REG is defined outside the block.  */
      if (insn == bb_head)
	return NULL_RTX;
      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
	continue;

      /* Find a possible def of REG in INSN.  */
      FOR_EACH_INSN_DEF (def, insn)
	if (DF_REF_REGNO (def) == REGNO (reg))
	  break;

      /* No definitions of REG; continue searching.  */
      if (def == NULL)
	continue;

      /* Bail if this is not a totally normal set of REG.  */
      if (DF_REF_IS_ARTIFICIAL (def))
	return NULL_RTX;
      if (DF_REF_FLAGS (def) & abnormal_flags)
	return NULL_RTX;

      /* We've found an insn between the compare and the clobber that sets
	 REG.  Given that pass_cprop_hardreg has not yet run, we still find
	 situations in which we can usefully look through a copy insn.  */
      rtx x = single_set (insn);
      if (x == NULL_RTX)
	return NULL_RTX;
      reg = SET_SRC (x);
      if (!REG_P (reg))
	return NULL_RTX;
    }

  if (GET_MODE (reg) != orig_mode)
    return NULL_RTX;

  return reg;
}

/* Return true if it is okay to merge the comparison CMP_INSN with
   the instruction ARITH_INSN.  Both instructions are assumed to be in the
   same basic block with ARITH_INSN appearing before CMP_INSN.  This checks
   that there are no uses or defs of the condition flags or control flow
   changes between the two instructions.  */

static bool
can_merge_compare_into_arith (rtx_insn *cmp_insn, rtx_insn *arith_insn)
{
  for (rtx_insn *insn = PREV_INSN (cmp_insn);
       insn && insn != arith_insn;
       insn = PREV_INSN (insn))
    {
      if (!NONDEBUG_INSN_P (insn))
	continue;
      /* Bail if there are jumps or calls in between.  */
      if (!NONJUMP_INSN_P (insn))
	return false;

      /* Bail on old-style asm statements because they lack
	 data flow information.  */
      if (GET_CODE (PATTERN (insn)) == ASM_INPUT)
	return false;

      df_ref ref;
      /* Find a USE of the flags register.  */
      FOR_EACH_INSN_USE (ref, insn)
	if (DF_REF_REGNO (ref) == targetm.flags_regnum)
	  return false;

      /* Find a DEF of the flags register.  */
      FOR_EACH_INSN_DEF (ref, insn)
	if (DF_REF_REGNO (ref) == targetm.flags_regnum)
	  return false;
    }
  return true;
}

/* Given two SET expressions, SET_A and SET_B determine whether they form
   a recognizable pattern when emitted in parallel.  Return that parallel
   if so.  Otherwise return NULL.  */

static rtx
try_validate_parallel (rtx set_a, rtx set_b)
{
  rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set_a, set_b));
  rtx_insn *insn = make_insn_raw (par);

  if (insn_invalid_p (insn, false))
    {
      crtl->emit.x_cur_insn_uid--;
      return NULL_RTX;
    }

  SET_PREV_INSN (insn) = NULL_RTX;
  SET_NEXT_INSN (insn) = NULL_RTX;
  INSN_LOCATION (insn) = 0;
  return insn;
}

/* For a comparison instruction described by CMP check if it compares a
   register with zero i.e. it is of the form CC := CMP R1, 0.
   If it is, find the instruction defining R1 (say I1) and try to create a
   PARALLEL consisting of I1 and the comparison, representing a flag-setting
   arithmetic instruction.  Example:
   I1: R1 := R2 + R3
   <instructions that don't read the condition register>
   I2: CC := CMP R1 0
   I2 can be merged with I1 into:
   I1: { CC := CMP (R2 + R3) 0 ; R1 := R2 + R3 }
   This catches cases where R1 is used between I1 and I2 and therefore
   combine and other RTL optimisations will not try to propagate it into
   I2.  Return true if we succeeded in merging CMP.  */

static bool
try_merge_compare (struct comparison *cmp)
{
  rtx_insn *cmp_insn = cmp->insn;

  if (cmp->in_b != const0_rtx || cmp->in_a_setter == NULL)
    return false;
  rtx in_a = cmp->in_a;
  df_ref use;

  FOR_EACH_INSN_USE (use, cmp_insn)
    if (DF_REF_REGNO (use) == REGNO (in_a))
      break;
  if (!use)
    return false;

  rtx_insn *def_insn = cmp->in_a_setter;
  rtx set = single_set (def_insn);
  if (!set)
    return false;

  if (!can_merge_compare_into_arith (cmp_insn, def_insn))
    return false;

  rtx src = SET_SRC (set);

  /* If the source uses addressing modes with side effects, we can't
     do the merge because we'd end up with a PARALLEL that has two
     instances of that side effect in it.  */
  if (side_effects_p (src))
    return false;

  rtx flags = maybe_select_cc_mode (cmp, src, CONST0_RTX (GET_MODE (src)));
  if (!flags)
    {
      /* We may already have a change group going through maybe_select_cc_mode.
	 Discard it properly.  */
      cancel_changes (0);
      return false;
    }

  rtx flag_set
    = gen_rtx_SET (flags, gen_rtx_COMPARE (GET_MODE (flags),
					   copy_rtx (src),
					   CONST0_RTX (GET_MODE (src))));
  rtx arith_set = copy_rtx (PATTERN (def_insn));
  rtx par = try_validate_parallel (flag_set, arith_set);
  if (!par)
    {
      /* We may already have a change group going through maybe_select_cc_mode.
	 Discard it properly.  */
      cancel_changes (0);
      return false;
    }
  if (!apply_change_group ())
    return false;
  emit_insn_after (par, def_insn);
  delete_insn (def_insn);
  delete_insn (cmp->insn);
  return true;
}

/* Attempt to replace a comparison with a prior arithmetic insn that can
   compute the same flags value as the comparison itself.  Return true if
   successful, having made all rtl modifications necessary.  */

static bool
try_eliminate_compare (struct comparison *cmp)
{
  rtx flags, in_a, in_b, cmp_a, cmp_b;

  if (try_merge_compare (cmp))
    return true;

  /* We must have found an interesting "clobber" preceding the compare.  */
  if (cmp->prev_clobber == NULL)
    return false;

  /* Verify that IN_A is not clobbered in between CMP and PREV_CLOBBER.
     Given that this target requires this pass, we can assume that most
     insns do clobber the flags, and so the distance between the compare
     and the clobber is likely to be small.  */
  /* ??? This is one point at which one could argue that DF_REF_CHAIN would
     be useful, but it is thought to be too heavy-weight a solution here.  */
  in_a = equivalent_reg_at_start (cmp->in_a, cmp->insn, cmp->prev_clobber);
  if (!in_a)
    return false;

  /* Likewise for IN_B if need be.  */
  if (CONSTANT_P (cmp->in_b))
    in_b = cmp->in_b;
  else if (REG_P (cmp->in_b))
    {
      in_b = equivalent_reg_at_start (cmp->in_b, cmp->insn, cmp->prev_clobber);
      if (!in_b)
	return false;
    }
  else if (GET_CODE (cmp->in_b) == UNSPEC)
    {
      const int len = XVECLEN (cmp->in_b, 0);
      rtvec v = rtvec_alloc (len);
      for (int i = 0; i < len; i++)
	{
	  rtx r = equivalent_reg_at_start (XVECEXP (cmp->in_b, 0, i),
					   cmp->insn, cmp->prev_clobber);
	  if (!r)
	    return false;
	  RTVEC_ELT (v, i) = r;
	}
      in_b = gen_rtx_UNSPEC (GET_MODE (cmp->in_b), v, XINT (cmp->in_b, 1));
    }
  else
    gcc_unreachable ();

  /* We've reached PREV_CLOBBER without finding a modification of IN_A.
     Validate that PREV_CLOBBER itself does in fact refer to IN_A.  Do
     recall that we've already validated the shape of PREV_CLOBBER.  */
  rtx_insn *insn = cmp->prev_clobber;

  rtx x = XVECEXP (PATTERN (insn), 0, 0);
  if (rtx_equal_p (SET_DEST (x), in_a))
    cmp_a = SET_SRC (x);

  /* Also check operations with implicit extensions, e.g.:
     [(set (reg:DI)
	   (zero_extend:DI (plus:SI (reg:SI) (reg:SI))))
      (set (reg:CCZ flags)
	   (compare:CCZ (plus:SI (reg:SI) (reg:SI))
			(const_int 0)))]  */
  else if (REG_P (SET_DEST (x))
	   && REG_P (in_a)
	   && REGNO (SET_DEST (x)) == REGNO (in_a)
	   && (GET_CODE (SET_SRC (x)) == ZERO_EXTEND
	       || GET_CODE (SET_SRC (x)) == SIGN_EXTEND)
	   && GET_MODE (XEXP (SET_SRC (x), 0)) == GET_MODE (in_a))
    cmp_a = XEXP (SET_SRC (x), 0);

  /* Also check fully redundant comparisons, e.g.:
     [(set (reg:SI)
	   (minus:SI (reg:SI) (reg:SI))))
      (set (reg:CC flags)
	   (compare:CC (reg:SI) (reg:SI)))]  */
  else if (REG_P (in_b)
	   && GET_CODE (SET_SRC (x)) == MINUS
	   && rtx_equal_p (XEXP (SET_SRC (x), 0), in_a)
	   && rtx_equal_p (XEXP (SET_SRC (x), 1), in_b))
    cmp_a = in_a;

  else
    return false;

  /* If the source uses addressing modes with side effects, we can't
     do the merge because we'd end up with a PARALLEL that has two
     instances of that side effect in it.  */
  if (side_effects_p (cmp_a))
    return false;

  if (in_a == in_b)
    cmp_b = cmp_a;
  else if (rtx_equal_p (SET_DEST (x), in_b))
    cmp_b = SET_SRC (x);
  else
    cmp_b = in_b;
  if (side_effects_p (cmp_b))
    return false;

  /* Determine if we ought to use a different CC_MODE here.  */
  flags = maybe_select_cc_mode (cmp, cmp_a, cmp_b);
  if (flags == NULL)
    flags = gen_rtx_REG (cmp->orig_mode, targetm.flags_regnum);

  /* Generate a new comparison for installation in the setter.  */
  rtx y = cmp->not_in_a
	  ? gen_rtx_NOT (GET_MODE (cmp_a), copy_rtx (cmp_a))
	  : copy_rtx (cmp_a);
  y = gen_rtx_COMPARE (GET_MODE (flags), y, copy_rtx (cmp_b));
  y = gen_rtx_SET (flags, y);

  /* Canonicalize instruction to:
     [(set (reg:CCM) (compare:CCM (operation) (immediate)))
      (set (reg) (operation)]  */

  rtvec v = rtvec_alloc (2);
  RTVEC_ELT (v, 0) = y;
  RTVEC_ELT (v, 1) = x;

  rtx pat = gen_rtx_PARALLEL (VOIDmode, v);

  /* Succeed if the new instruction is valid.  Note that we may have started
     a change group within maybe_select_cc_mode, therefore we must continue.  */
  validate_change (insn, &PATTERN (insn), pat, true);

  if (!apply_change_group ())
    return false;

  /* Success.  Delete the compare insn...  */
  delete_insn (cmp->insn);

  /* ... and any notes that are now invalid due to multiple sets.  */
  x = find_regno_note (insn, REG_UNUSED, targetm.flags_regnum);
  if (x)
    remove_note (insn, x);
  x = find_reg_note (insn, REG_EQUAL, NULL);
  if (x)
    remove_note (insn, x);
  x = find_reg_note (insn, REG_EQUIV, NULL);
  if (x)
    remove_note (insn, x);

  return true;
}

/* Main entry point to the pass.  */

static unsigned int
execute_compare_elim_after_reload (void)
{
  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();

  gcc_checking_assert (!all_compares.exists ());

  /* Locate all comparisons and their uses, and eliminate duplicates.  */
  find_comparisons ();
  if (all_compares.exists ())
    {
      struct comparison *cmp;
      size_t i;

      /* Eliminate comparisons that are redundant with flags computation.  */
      FOR_EACH_VEC_ELT (all_compares, i, cmp)
	{
	  try_eliminate_compare (cmp);
	  XDELETE (cmp);
	}

      all_compares.release ();
    }

  return 0;
}

namespace {

const pass_data pass_data_compare_elim_after_reload =
{
  RTL_PASS, /* type */
  "cmpelim", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_finish | TODO_df_verify ), /* todo_flags_finish */
};

class pass_compare_elim_after_reload : public rtl_opt_pass
{
public:
  pass_compare_elim_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_compare_elim_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
    {
      /* Setting this target hook value is how a backend indicates the need.  */
      if (targetm.flags_regnum == INVALID_REGNUM)
	return false;
      return flag_compare_elim_after_reload;
    }

  unsigned int execute (function *) final override
    {
      return execute_compare_elim_after_reload ();
    }

}; // class pass_compare_elim_after_reload

} // anon namespace

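/* Illustrative note: a backend typically opts in to this pass by giving
   targetm.flags_regnum a real value (e.g. by defining TARGET_FLAGS_REGNUM
   to its condition-code hard register) and by providing the flag-setting
   arithmetic patterns described at the top of this file; see the internals
   documentation for the authoritative requirements.  */
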
rtl_opt_pass *
make_pass_compare_elim_after_reload (gcc::context *ctxt)
{
  return new pass_compare_elim_after_reload (ctxt);
}