/* gcc/compare-elim.c -- from official-gcc.git,
   blob 086fbc76f011c2896af2318eb10bbd9b45797b55.  */
1 /* Post-reload compare elimination.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* There is a set of targets whose general-purpose move or addition
21 instructions clobber the flags. These targets cannot split their
22 CBRANCH/CSTORE etc patterns before reload is complete, lest reload
23 itself insert these instructions in between the flags setter and user.
24 Because these targets cannot split the compare from the use, they
25 cannot make use of the comparison elimination offered by the combine pass.
27 This is a small pass intended to provide comparison elimination similar to
28 what is available via NOTICE_UPDATE_CC for cc0 targets. This should help
29 encourage cc0 targets to convert to an explicit post-reload representation
30 of the flags.
32 This pass assumes:
34 (0) CBRANCH/CSTORE etc have been split in pass_split_after_reload.
36 (1) All comparison patterns are represented as
38 [(set (reg:CC) (compare:CC (reg) (reg_or_immediate)))]
40 (2) All insn patterns that modify the flags are represented as
42 [(set (reg) (operation)
43 (clobber (reg:CC))]
45 (3) If an insn of form (2) can usefully set the flags, there is
46 another pattern of the form
48 [(set (reg:CCM) (compare:CCM (operation) (immediate)))
49 (set (reg) (operation)]
51 The mode CCM will be chosen as if by SELECT_CC_MODE.
53 Note that unlike NOTICE_UPDATE_CC, we do not handle memory operands.
   This could be handled as a future enhancement.  */
57 #include "config.h"
58 #include "system.h"
59 #include "coretypes.h"
60 #include "backend.h"
61 #include "target.h"
62 #include "rtl.h"
63 #include "df.h"
64 #include "memmodel.h"
65 #include "tm_p.h"
66 #include "insn-config.h"
67 #include "recog.h"
68 #include "emit-rtl.h"
69 #include "cfgrtl.h"
70 #include "tree-pass.h"
71 #include "domwalk.h"
74 /* These structures describe a comparison and how it is used. */
76 /* The choice of maximum 3 uses comes from wanting to eliminate the two
77 duplicate compares from a three-way branch on the sign of a value.
78 This is also sufficient to eliminate the duplicate compare against the
79 high-part of a double-word comparison. */
80 #define MAX_CMP_USE 3
82 struct comparison_use
84 /* The instruction in which the result of the compare is used. */
85 rtx_insn *insn;
86 /* The location of the flags register within the use. */
87 rtx *loc;
88 /* The comparison code applied against the flags register. */
89 enum rtx_code code;
92 struct comparison
94 /* The comparison instruction. */
95 rtx_insn *insn;
97 /* The insn prior to the comparison insn that clobbers the flags. */
98 rtx_insn *prev_clobber;
100 /* The two values being compared. These will be either REGs or
101 constants. */
102 rtx in_a, in_b;
104 /* The REG_EH_REGION of the comparison. */
105 rtx eh_note;
107 /* Information about how this comparison is used. */
108 struct comparison_use uses[MAX_CMP_USE];
110 /* The original CC_MODE for this comparison. */
111 machine_mode orig_mode;
113 /* The number of uses identified for this comparison. */
114 unsigned short n_uses;
116 /* True if not all uses of this comparison have been identified.
117 This can happen either for overflowing the array above, or if
118 the flags register is used in some unusual context. */
119 bool missing_uses;
121 /* True if its inputs are still valid at the end of the block. */
122 bool inputs_valid;
125 static vec<comparison *> all_compares;
127 /* Look for a "conforming" comparison, as defined above. If valid, return
128 the rtx for the COMPARE itself. */
130 static rtx
131 conforming_compare (rtx_insn *insn)
133 rtx set, src, dest;
135 set = single_set (insn);
136 if (set == NULL)
137 return NULL;
139 src = SET_SRC (set);
140 if (GET_CODE (src) != COMPARE)
141 return NULL;
143 dest = SET_DEST (set);
144 if (!REG_P (dest) || REGNO (dest) != targetm.flags_regnum)
145 return NULL;
147 if (!REG_P (XEXP (src, 0)))
148 return NULL;
150 if (CONSTANT_P (XEXP (src, 1)) || REG_P (XEXP (src, 1)))
151 return src;
153 if (GET_CODE (XEXP (src, 1)) == UNSPEC)
155 for (int i = 0; i < XVECLEN (XEXP (src, 1), 0); i++)
156 if (!REG_P (XVECEXP (XEXP (src, 1), 0, i)))
157 return NULL;
158 return src;
161 return NULL;
164 /* Look for a pattern of the "correct" form for an insn with a flags clobber
165 for which we may be able to eliminate a compare later. We're not looking
166 to validate any inputs at this time, merely see that the basic shape is
167 correct. The term "arithmetic" may be somewhat misleading... */
169 static bool
170 arithmetic_flags_clobber_p (rtx_insn *insn)
172 rtx pat, x;
174 if (!NONJUMP_INSN_P (insn))
175 return false;
176 pat = PATTERN (insn);
177 if (asm_noperands (pat) >= 0)
178 return false;
180 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) == 2)
182 x = XVECEXP (pat, 0, 0);
183 if (GET_CODE (x) != SET)
184 return false;
185 x = SET_DEST (x);
186 if (!REG_P (x))
187 return false;
189 x = XVECEXP (pat, 0, 1);
190 if (GET_CODE (x) == CLOBBER)
192 x = XEXP (x, 0);
193 if (REG_P (x) && REGNO (x) == targetm.flags_regnum)
194 return true;
198 return false;
201 /* Look for uses of FLAGS in INSN. If we find one we can analyze, record
202 it in CMP; otherwise indicate that we've missed a use. */
204 static void
205 find_flags_uses_in_insn (struct comparison *cmp, rtx_insn *insn)
207 df_ref use;
209 /* If we've already lost track of uses, don't bother collecting more. */
210 if (cmp->missing_uses)
211 return;
213 /* Find a USE of the flags register. */
214 FOR_EACH_INSN_USE (use, insn)
215 if (DF_REF_REGNO (use) == targetm.flags_regnum)
217 rtx x, *loc;
219 /* If this is an unusual use, quit. */
220 if (DF_REF_TYPE (use) != DF_REF_REG_USE)
221 goto fail;
223 /* If we've run out of slots to record uses, quit. */
224 if (cmp->n_uses == MAX_CMP_USE)
225 goto fail;
227 /* Unfortunately the location of the flags register, while present
228 in the reference structure, doesn't help. We need to find the
229 comparison code that is outer to the actual flags use. */
230 loc = DF_REF_LOC (use);
231 x = PATTERN (insn);
232 if (GET_CODE (x) == PARALLEL)
233 x = XVECEXP (x, 0, 0);
234 x = SET_SRC (x);
235 if (GET_CODE (x) == IF_THEN_ELSE)
236 x = XEXP (x, 0);
237 if (COMPARISON_P (x)
238 && loc == &XEXP (x, 0)
239 && XEXP (x, 1) == const0_rtx)
241 /* We've found a use of the flags that we understand. */
242 struct comparison_use *cuse = &cmp->uses[cmp->n_uses++];
243 cuse->insn = insn;
244 cuse->loc = loc;
245 cuse->code = GET_CODE (x);
247 else
248 goto fail;
250 return;
252 fail:
253 /* We failed to recognize this use of the flags register. */
254 cmp->missing_uses = true;
257 class find_comparison_dom_walker : public dom_walker
259 public:
260 find_comparison_dom_walker (cdi_direction direction)
261 : dom_walker (direction) {}
263 virtual edge before_dom_children (basic_block);
266 /* Return true if conforming COMPARE with EH_NOTE is redundant with comparison
267 CMP and can thus be eliminated. */
269 static bool
270 can_eliminate_compare (rtx compare, rtx eh_note, struct comparison *cmp)
272 /* Take care that it's in the same EH region. */
273 if (cfun->can_throw_non_call_exceptions
274 && !rtx_equal_p (eh_note, cmp->eh_note))
275 return false;
277 /* Make sure the compare is redundant with the previous. */
278 if (!rtx_equal_p (XEXP (compare, 0), cmp->in_a)
279 || !rtx_equal_p (XEXP (compare, 1), cmp->in_b))
280 return false;
282 /* New mode must be compatible with the previous compare mode. */
283 machine_mode new_mode
284 = targetm.cc_modes_compatible (GET_MODE (compare), cmp->orig_mode);
286 if (new_mode == VOIDmode)
287 return false;
289 if (cmp->orig_mode != new_mode)
291 /* Generate new comparison for substitution. */
292 rtx flags = gen_rtx_REG (new_mode, targetm.flags_regnum);
293 rtx x = gen_rtx_COMPARE (new_mode, cmp->in_a, cmp->in_b);
294 x = gen_rtx_SET (flags, x);
296 if (!validate_change (cmp->insn, &PATTERN (cmp->insn), x, false))
297 return false;
299 cmp->orig_mode = new_mode;
302 return true;
305 /* Identify comparison instructions within BB. If the flags from the last
306 compare in the BB is live at the end of the block, install the compare
307 in BB->AUX. Called via dom_walker.walk (). */
309 edge
310 find_comparison_dom_walker::before_dom_children (basic_block bb)
312 struct comparison *last_cmp;
313 rtx_insn *insn, *next, *last_clobber;
314 bool last_cmp_valid;
315 bool need_purge = false;
316 bitmap killed;
318 killed = BITMAP_ALLOC (NULL);
320 /* The last comparison that was made. Will be reset to NULL
321 once the flags are clobbered. */
322 last_cmp = NULL;
324 /* True iff the last comparison has not been clobbered, nor
325 have its inputs. Used to eliminate duplicate compares. */
326 last_cmp_valid = false;
328 /* The last insn that clobbered the flags, if that insn is of
329 a form that may be valid for eliminating a following compare.
330 To be reset to NULL once the flags are set otherwise. */
331 last_clobber = NULL;
333 /* Propagate the last live comparison throughout the extended basic block. */
334 if (single_pred_p (bb))
336 last_cmp = (struct comparison *) single_pred (bb)->aux;
337 if (last_cmp)
338 last_cmp_valid = last_cmp->inputs_valid;
341 for (insn = BB_HEAD (bb); insn; insn = next)
343 rtx src;
345 next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn));
346 if (!NONDEBUG_INSN_P (insn))
347 continue;
349 /* Compute the set of registers modified by this instruction. */
350 bitmap_clear (killed);
351 df_simulate_find_defs (insn, killed);
353 src = conforming_compare (insn);
354 if (src)
356 rtx eh_note = NULL;
358 if (cfun->can_throw_non_call_exceptions)
359 eh_note = find_reg_note (insn, REG_EH_REGION, NULL);
361 if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp))
363 if (eh_note)
364 need_purge = true;
365 delete_insn (insn);
366 continue;
369 last_cmp = XCNEW (struct comparison);
370 last_cmp->insn = insn;
371 last_cmp->prev_clobber = last_clobber;
372 last_cmp->in_a = XEXP (src, 0);
373 last_cmp->in_b = XEXP (src, 1);
374 last_cmp->eh_note = eh_note;
375 last_cmp->orig_mode = GET_MODE (src);
376 all_compares.safe_push (last_cmp);
378 /* It's unusual, but be prepared for comparison patterns that
379 also clobber an input, or perhaps a scratch. */
380 last_clobber = NULL;
381 last_cmp_valid = true;
384 else
386 /* Notice if this instruction uses the flags register. */
387 if (last_cmp)
388 find_flags_uses_in_insn (last_cmp, insn);
390 /* Notice if this instruction kills the flags register. */
391 if (bitmap_bit_p (killed, targetm.flags_regnum))
393 /* See if this insn could be the "clobber" that eliminates
394 a future comparison. */
395 last_clobber = (arithmetic_flags_clobber_p (insn) ? insn : NULL);
397 /* In either case, the previous compare is no longer valid. */
398 last_cmp = NULL;
399 last_cmp_valid = false;
403 /* Notice if any of the inputs to the comparison have changed. */
404 if (last_cmp_valid
405 && (bitmap_bit_p (killed, REGNO (last_cmp->in_a))
406 || (REG_P (last_cmp->in_b)
407 && bitmap_bit_p (killed, REGNO (last_cmp->in_b)))))
408 last_cmp_valid = false;
411 BITMAP_FREE (killed);
413 /* Remember the live comparison for subsequent members of
414 the extended basic block. */
415 if (last_cmp)
417 bb->aux = last_cmp;
418 last_cmp->inputs_valid = last_cmp_valid;
420 /* Look to see if the flags register is live outgoing here, and
421 incoming to any successor not part of the extended basic block. */
422 if (bitmap_bit_p (df_get_live_out (bb), targetm.flags_regnum))
424 edge e;
425 edge_iterator ei;
427 FOR_EACH_EDGE (e, ei, bb->succs)
429 basic_block dest = e->dest;
430 if (bitmap_bit_p (df_get_live_in (bb), targetm.flags_regnum)
431 && !single_pred_p (dest))
433 last_cmp->missing_uses = true;
434 break;
440 /* If we deleted a compare with a REG_EH_REGION note, we may need to
441 remove EH edges. */
442 if (need_purge)
443 purge_dead_edges (bb);
445 return NULL;
448 /* Find all comparisons in the function. */
450 static void
451 find_comparisons (void)
453 calculate_dominance_info (CDI_DOMINATORS);
455 find_comparison_dom_walker (CDI_DOMINATORS)
456 .walk (cfun->cfg->x_entry_block_ptr);
458 clear_aux_for_blocks ();
459 free_dominance_info (CDI_DOMINATORS);
462 /* Select an alternate CC_MODE for a comparison insn comparing A and B.
463 Note that inputs are almost certainly different than the IN_A and IN_B
464 stored in CMP -- we're called while attempting to eliminate the compare
465 after all. Return the new FLAGS rtx if successful, else return NULL.
466 Note that this function may start a change group. */
468 static rtx
469 maybe_select_cc_mode (struct comparison *cmp, rtx a ATTRIBUTE_UNUSED,
470 rtx b ATTRIBUTE_UNUSED)
472 machine_mode sel_mode;
473 const int n = cmp->n_uses;
474 rtx flags = NULL;
476 #ifndef SELECT_CC_MODE
477 /* Minimize code differences when this target macro is undefined. */
478 return NULL;
479 #define SELECT_CC_MODE(A,B,C) (gcc_unreachable (), VOIDmode)
480 #endif
482 /* If we don't have access to all of the uses, we can't validate. */
483 if (cmp->missing_uses || n == 0)
484 return NULL;
486 /* Find a new mode that works for all of the uses. Special case the
487 common case of exactly one use. */
488 if (n == 1)
490 sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
491 if (sel_mode != cmp->orig_mode)
493 flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
494 validate_change (cmp->uses[0].insn, cmp->uses[0].loc, flags, true);
497 else
499 int i;
501 sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
502 for (i = 1; i < n; ++i)
504 machine_mode new_mode = SELECT_CC_MODE (cmp->uses[i].code, a, b);
505 if (new_mode != sel_mode)
507 sel_mode = targetm.cc_modes_compatible (sel_mode, new_mode);
508 if (sel_mode == VOIDmode)
509 return NULL;
513 if (sel_mode != cmp->orig_mode)
515 flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
516 for (i = 0; i < n; ++i)
517 validate_change (cmp->uses[i].insn, cmp->uses[i].loc, flags, true);
521 return flags;
524 /* Return a register RTX holding the same value at START as REG at END, or
525 NULL_RTX if there is none. */
527 static rtx
528 equivalent_reg_at_start (rtx reg, rtx_insn *end, rtx_insn *start)
530 machine_mode orig_mode = GET_MODE (reg);
531 rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (end));
533 for (rtx_insn *insn = PREV_INSN (end);
534 insn != start;
535 insn = PREV_INSN (insn))
537 const int abnormal_flags
538 = (DF_REF_CONDITIONAL | DF_REF_PARTIAL | DF_REF_MAY_CLOBBER
539 | DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT
540 | DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART
541 | DF_REF_PRE_POST_MODIFY);
542 df_ref def;
544 /* Note that the BB_HEAD is always either a note or a label, but in
545 any case it means that REG is defined outside the block. */
546 if (insn == bb_head)
547 return NULL_RTX;
548 if (NOTE_P (insn) || DEBUG_INSN_P (insn))
549 continue;
551 /* Find a possible def of REG in INSN. */
552 FOR_EACH_INSN_DEF (def, insn)
553 if (DF_REF_REGNO (def) == REGNO (reg))
554 break;
556 /* No definitions of REG; continue searching. */
557 if (def == NULL)
558 continue;
560 /* Bail if this is not a totally normal set of REG. */
561 if (DF_REF_IS_ARTIFICIAL (def))
562 return NULL_RTX;
563 if (DF_REF_FLAGS (def) & abnormal_flags)
564 return NULL_RTX;
566 /* We've found an insn between the compare and the clobber that sets
567 REG. Given that pass_cprop_hardreg has not yet run, we still find
568 situations in which we can usefully look through a copy insn. */
569 rtx x = single_set (insn);
570 if (x == NULL_RTX)
571 return NULL_RTX;
572 reg = SET_SRC (x);
573 if (!REG_P (reg))
574 return NULL_RTX;
577 if (GET_MODE (reg) != orig_mode)
578 return NULL_RTX;
580 return reg;
583 /* Return true if it is okay to merge the comparison CMP_INSN with
584 the instruction ARITH_INSN. Both instructions are assumed to be in the
585 same basic block with ARITH_INSN appearing before CMP_INSN. This checks
586 that there are no uses or defs of the condition flags or control flow
587 changes between the two instructions. */
589 static bool
590 can_merge_compare_into_arith (rtx_insn *cmp_insn, rtx_insn *arith_insn)
592 for (rtx_insn *insn = PREV_INSN (cmp_insn);
593 insn && insn != arith_insn;
594 insn = PREV_INSN (insn))
596 if (!NONDEBUG_INSN_P (insn))
597 continue;
598 /* Bail if there are jumps or calls in between. */
599 if (!NONJUMP_INSN_P (insn))
600 return false;
602 /* Bail on old-style asm statements because they lack
603 data flow information. */
604 if (GET_CODE (PATTERN (insn)) == ASM_INPUT)
605 return false;
607 df_ref ref;
608 /* Find a USE of the flags register. */
609 FOR_EACH_INSN_USE (ref, insn)
610 if (DF_REF_REGNO (ref) == targetm.flags_regnum)
611 return false;
613 /* Find a DEF of the flags register. */
614 FOR_EACH_INSN_DEF (ref, insn)
615 if (DF_REF_REGNO (ref) == targetm.flags_regnum)
616 return false;
618 return true;
621 /* Given two SET expressions, SET_A and SET_B determine whether they form
622 a recognizable pattern when emitted in parallel. Return that parallel
623 if so. Otherwise return NULL. */
625 static rtx
626 try_validate_parallel (rtx set_a, rtx set_b)
628 rtx par
629 = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set_a, set_b));
631 rtx_insn *insn;
632 insn = gen_rtx_INSN (VOIDmode, 0, 0, 0, par, 0, -1, 0);
634 return recog_memoized (insn) > 0 ? par : NULL_RTX;
637 /* For a comparison instruction described by CMP check if it compares a
638 register with zero i.e. it is of the form CC := CMP R1, 0.
639 If it is, find the instruction defining R1 (say I1) and try to create a
640 PARALLEL consisting of I1 and the comparison, representing a flag-setting
641 arithmetic instruction. Example:
642 I1: R1 := R2 + R3
643 <instructions that don't read the condition register>
644 I2: CC := CMP R1 0
645 I2 can be merged with I1 into:
646 I1: { R1 := R2 + R3 ; CC := CMP (R2 + R3) 0 }
647 This catches cases where R1 is used between I1 and I2 and therefore
648 combine and other RTL optimisations will not try to propagate it into
649 I2. Return true if we succeeded in merging CMP. */
651 static bool
652 try_merge_compare (struct comparison *cmp)
654 rtx_insn *cmp_insn = cmp->insn;
656 if (!REG_P (cmp->in_a) || cmp->in_b != const0_rtx)
657 return false;
658 rtx in_a = cmp->in_a;
659 df_ref use;
661 FOR_EACH_INSN_USE (use, cmp_insn)
662 if (DF_REF_REGNO (use) == REGNO (in_a))
663 break;
664 if (!use)
665 return false;
667 /* Validate the data flow information before attempting to
668 find the instruction that defines in_a. */
670 struct df_link *ref_chain;
671 ref_chain = DF_REF_CHAIN (use);
672 if (!ref_chain || !ref_chain->ref
673 || !DF_REF_INSN_INFO (ref_chain->ref) || ref_chain->next != NULL)
674 return false;
676 rtx_insn *def_insn = DF_REF_INSN (ref_chain->ref);
677 /* We found the insn that defines in_a. Only consider the cases where
678 it is in the same block as the comparison. */
679 if (BLOCK_FOR_INSN (cmp_insn) != BLOCK_FOR_INSN (def_insn))
680 return false;
682 rtx set = single_set (def_insn);
683 if (!set)
684 return false;
686 if (!can_merge_compare_into_arith (cmp_insn, def_insn))
687 return false;
689 rtx src = SET_SRC (set);
690 rtx flags = maybe_select_cc_mode (cmp, src, CONST0_RTX (GET_MODE (src)));
691 if (!flags)
693 /* We may already have a change group going through maybe_select_cc_mode.
694 Discard it properly. */
695 cancel_changes (0);
696 return false;
699 rtx flag_set
700 = gen_rtx_SET (flags, gen_rtx_COMPARE (GET_MODE (flags),
701 copy_rtx (src),
702 CONST0_RTX (GET_MODE (src))));
703 rtx arith_set = copy_rtx (PATTERN (def_insn));
704 rtx par = try_validate_parallel (flag_set, arith_set);
705 if (!par)
707 /* We may already have a change group going through maybe_select_cc_mode.
708 Discard it properly. */
709 cancel_changes (0);
710 return false;
712 if (!apply_change_group ())
713 return false;
714 emit_insn_after (par, def_insn);
715 delete_insn (def_insn);
716 delete_insn (cmp->insn);
717 return true;
720 /* Attempt to replace a comparison with a prior arithmetic insn that can
721 compute the same flags value as the comparison itself. Return true if
722 successful, having made all rtl modifications necessary. */
724 static bool
725 try_eliminate_compare (struct comparison *cmp)
727 rtx flags, in_a, in_b, cmp_src;
729 if (try_merge_compare (cmp))
730 return true;
732 /* We must have found an interesting "clobber" preceding the compare. */
733 if (cmp->prev_clobber == NULL)
734 return false;
736 /* Verify that IN_A is not clobbered in between CMP and PREV_CLOBBER.
737 Given that this target requires this pass, we can assume that most
738 insns do clobber the flags, and so the distance between the compare
739 and the clobber is likely to be small. */
740 /* ??? This is one point at which one could argue that DF_REF_CHAIN would
741 be useful, but it is thought to be too heavy-weight a solution here. */
742 in_a = equivalent_reg_at_start (cmp->in_a, cmp->insn, cmp->prev_clobber);
743 if (!in_a)
744 return false;
746 /* Likewise for IN_B if need be. */
747 if (CONSTANT_P (cmp->in_b))
748 in_b = cmp->in_b;
749 else if (REG_P (cmp->in_b))
751 in_b = equivalent_reg_at_start (cmp->in_b, cmp->insn, cmp->prev_clobber);
752 if (!in_b)
753 return false;
755 else if (GET_CODE (cmp->in_b) == UNSPEC)
757 const int len = XVECLEN (cmp->in_b, 0);
758 rtvec v = rtvec_alloc (len);
759 for (int i = 0; i < len; i++)
761 rtx r = equivalent_reg_at_start (XVECEXP (cmp->in_b, 0, i),
762 cmp->insn, cmp->prev_clobber);
763 if (!r)
764 return false;
765 RTVEC_ELT (v, i) = r;
767 in_b = gen_rtx_UNSPEC (GET_MODE (cmp->in_b), v, XINT (cmp->in_b, 1));
769 else
770 gcc_unreachable ();
772 /* We've reached PREV_CLOBBER without finding a modification of IN_A.
773 Validate that PREV_CLOBBER itself does in fact refer to IN_A. Do
774 recall that we've already validated the shape of PREV_CLOBBER. */
775 rtx_insn *insn = cmp->prev_clobber;
777 rtx x = XVECEXP (PATTERN (insn), 0, 0);
778 if (rtx_equal_p (SET_DEST (x), in_a))
779 cmp_src = SET_SRC (x);
781 /* Also check operations with implicit extensions, e.g.:
782 [(set (reg:DI)
783 (zero_extend:DI (plus:SI (reg:SI) (reg:SI))))
784 (set (reg:CCZ flags)
785 (compare:CCZ (plus:SI (reg:SI) (reg:SI))
786 (const_int 0)))] */
787 else if (REG_P (SET_DEST (x))
788 && REG_P (in_a)
789 && REGNO (SET_DEST (x)) == REGNO (in_a)
790 && (GET_CODE (SET_SRC (x)) == ZERO_EXTEND
791 || GET_CODE (SET_SRC (x)) == SIGN_EXTEND)
792 && GET_MODE (XEXP (SET_SRC (x), 0)) == GET_MODE (in_a))
793 cmp_src = XEXP (SET_SRC (x), 0);
795 /* Also check fully redundant comparisons, e.g.:
796 [(set (reg:SI)
797 (minus:SI (reg:SI) (reg:SI))))
798 (set (reg:CC flags)
799 (compare:CC (reg:SI) (reg:SI)))] */
800 else if (REG_P (in_b)
801 && GET_CODE (SET_SRC (x)) == MINUS
802 && rtx_equal_p (XEXP (SET_SRC (x), 0), in_a)
803 && rtx_equal_p (XEXP (SET_SRC (x), 1), in_b))
804 cmp_src = in_a;
806 else
807 return false;
809 /* Determine if we ought to use a different CC_MODE here. */
810 flags = maybe_select_cc_mode (cmp, cmp_src, in_b);
811 if (flags == NULL)
812 flags = gen_rtx_REG (cmp->orig_mode, targetm.flags_regnum);
814 /* Generate a new comparison for installation in the setter. */
815 rtx y = copy_rtx (cmp_src);
816 y = gen_rtx_COMPARE (GET_MODE (flags), y, in_b);
817 y = gen_rtx_SET (flags, y);
819 /* Canonicalize instruction to:
820 [(set (reg:CCM) (compare:CCM (operation) (immediate)))
821 (set (reg) (operation)] */
823 rtvec v = rtvec_alloc (2);
824 RTVEC_ELT (v, 0) = y;
825 RTVEC_ELT (v, 1) = x;
827 rtx pat = gen_rtx_PARALLEL (VOIDmode, v);
829 /* Succeed if the new instruction is valid. Note that we may have started
830 a change group within maybe_select_cc_mode, therefore we must continue. */
831 validate_change (insn, &PATTERN (insn), pat, true);
833 if (!apply_change_group ())
834 return false;
836 /* Success. Delete the compare insn... */
837 delete_insn (cmp->insn);
839 /* ... and any notes that are now invalid due to multiple sets. */
840 x = find_regno_note (insn, REG_UNUSED, targetm.flags_regnum);
841 if (x)
842 remove_note (insn, x);
843 x = find_reg_note (insn, REG_EQUAL, NULL);
844 if (x)
845 remove_note (insn, x);
846 x = find_reg_note (insn, REG_EQUIV, NULL);
847 if (x)
848 remove_note (insn, x);
850 return true;
853 /* Main entry point to the pass. */
855 static unsigned int
856 execute_compare_elim_after_reload (void)
858 df_chain_add_problem (DF_UD_CHAIN + DF_DU_CHAIN);
859 df_analyze ();
861 gcc_checking_assert (!all_compares.exists ());
863 /* Locate all comparisons and their uses, and eliminate duplicates. */
864 find_comparisons ();
865 if (all_compares.exists ())
867 struct comparison *cmp;
868 size_t i;
870 /* Eliminate comparisons that are redundant with flags computation. */
871 FOR_EACH_VEC_ELT (all_compares, i, cmp)
873 try_eliminate_compare (cmp);
874 XDELETE (cmp);
877 all_compares.release ();
880 return 0;
883 namespace {
885 const pass_data pass_data_compare_elim_after_reload =
887 RTL_PASS, /* type */
888 "cmpelim", /* name */
889 OPTGROUP_NONE, /* optinfo_flags */
890 TV_NONE, /* tv_id */
891 0, /* properties_required */
892 0, /* properties_provided */
893 0, /* properties_destroyed */
894 0, /* todo_flags_start */
895 ( TODO_df_finish | TODO_df_verify ), /* todo_flags_finish */
898 class pass_compare_elim_after_reload : public rtl_opt_pass
900 public:
901 pass_compare_elim_after_reload (gcc::context *ctxt)
902 : rtl_opt_pass (pass_data_compare_elim_after_reload, ctxt)
905 /* opt_pass methods: */
906 virtual bool gate (function *)
908 /* Setting this target hook value is how a backend indicates the need. */
909 if (targetm.flags_regnum == INVALID_REGNUM)
910 return false;
911 return flag_compare_elim_after_reload;
914 virtual unsigned int execute (function *)
916 return execute_compare_elim_after_reload ();
919 }; // class pass_compare_elim_after_reload
921 } // anon namespace
923 rtl_opt_pass *
924 make_pass_compare_elim_after_reload (gcc::context *ctxt)
926 return new pass_compare_elim_after_reload (ctxt);