1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "langhooks.h"
37 #include "basic-block.h"
41 static bool prefer_and_bit_test (enum machine_mode
, int);
42 static void do_jump_by_parts_greater (tree
, tree
, int, rtx
, rtx
, int);
43 static void do_jump_by_parts_equality (tree
, tree
, rtx
, rtx
, int);
44 static void do_compare_and_jump (tree
, tree
, enum rtx_code
, enum rtx_code
, rtx
,
47 /* Invert probability if there is any. -1 stands for unknown. */
52 return prob
== -1 ? -1 : REG_BR_PROB_BASE
- prob
;
55 /* At the start of a function, record that we have no previously-pushed
56 arguments waiting to be popped. */
59 init_pending_stack_adjust (void)
61 pending_stack_adjust
= 0;
64 /* Discard any pending stack adjustment. This avoid relying on the
65 RTL optimizers to remove useless adjustments when we know the
66 stack pointer value is dead. */
68 discard_pending_stack_adjust (void)
70 stack_pointer_delta
-= pending_stack_adjust
;
71 pending_stack_adjust
= 0;
74 /* When exiting from function, if safe, clear out any pending stack adjust
75 so the adjustment won't get done.
77 Note, if the current function calls alloca, then it must have a
78 frame pointer regardless of the value of flag_omit_frame_pointer. */
81 clear_pending_stack_adjust (void)
84 && (! flag_omit_frame_pointer
|| cfun
->calls_alloca
)
86 discard_pending_stack_adjust ();
89 /* Pop any previously-pushed arguments that have not been popped yet. */
92 do_pending_stack_adjust (void)
94 if (inhibit_defer_pop
== 0)
96 if (pending_stack_adjust
!= 0)
97 adjust_stack (GEN_INT (pending_stack_adjust
));
98 pending_stack_adjust
= 0;
102 /* Expand conditional expressions. */
104 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
105 LABEL is an rtx of code CODE_LABEL, in this function and all the
109 jumpifnot (tree exp
, rtx label
, int prob
)
111 do_jump (exp
, label
, NULL_RTX
, inv (prob
));
115 jumpifnot_1 (enum tree_code code
, tree op0
, tree op1
, rtx label
, int prob
)
117 do_jump_1 (code
, op0
, op1
, label
, NULL_RTX
, inv (prob
));
120 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
123 jumpif (tree exp
, rtx label
, int prob
)
125 do_jump (exp
, NULL_RTX
, label
, prob
);
129 jumpif_1 (enum tree_code code
, tree op0
, tree op1
, rtx label
, int prob
)
131 do_jump_1 (code
, op0
, op1
, NULL_RTX
, label
, prob
);
134 /* Used internally by prefer_and_bit_test. */
136 static GTY(()) rtx and_reg
;
137 static GTY(()) rtx and_test
;
138 static GTY(()) rtx shift_test
;
140 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
141 where X is an arbitrary register of mode MODE. Return true if the former
145 prefer_and_bit_test (enum machine_mode mode
, int bitnum
)
151 /* Set up rtxes for the two variations. Use NULL as a placeholder
152 for the BITNUM-based constants. */
153 and_reg
= gen_rtx_REG (mode
, FIRST_PSEUDO_REGISTER
);
154 and_test
= gen_rtx_AND (mode
, and_reg
, NULL
);
155 shift_test
= gen_rtx_AND (mode
, gen_rtx_ASHIFTRT (mode
, and_reg
, NULL
),
160 /* Change the mode of the previously-created rtxes. */
161 PUT_MODE (and_reg
, mode
);
162 PUT_MODE (and_test
, mode
);
163 PUT_MODE (shift_test
, mode
);
164 PUT_MODE (XEXP (shift_test
, 0), mode
);
167 /* Fill in the integers. */
169 = immed_double_int_const (double_int_setbit (double_int_zero
, bitnum
),
171 XEXP (XEXP (shift_test
, 0), 1) = GEN_INT (bitnum
);
173 speed_p
= optimize_insn_for_speed_p ();
174 return (rtx_cost (and_test
, IF_THEN_ELSE
, 0, speed_p
)
175 <= rtx_cost (shift_test
, IF_THEN_ELSE
, 0, speed_p
));
178 /* Subroutine of do_jump, dealing with exploded comparisons of the type
179 OP0 CODE OP1 . IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
180 PROB is probability of jump to if_true_label, or -1 if unknown. */
183 do_jump_1 (enum tree_code code
, tree op0
, tree op1
,
184 rtx if_false_label
, rtx if_true_label
, int prob
)
186 enum machine_mode mode
;
187 rtx drop_through_label
= 0;
193 tree inner_type
= TREE_TYPE (op0
);
195 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
196 != MODE_COMPLEX_FLOAT
);
197 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
198 != MODE_COMPLEX_INT
);
200 if (integer_zerop (op1
))
201 do_jump (op0
, if_true_label
, if_false_label
, inv (prob
));
202 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
203 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
204 do_jump_by_parts_equality (op0
, op1
, if_false_label
, if_true_label
,
207 do_compare_and_jump (op0
, op1
, EQ
, EQ
, if_false_label
, if_true_label
,
214 tree inner_type
= TREE_TYPE (op0
);
216 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
217 != MODE_COMPLEX_FLOAT
);
218 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
219 != MODE_COMPLEX_INT
);
221 if (integer_zerop (op1
))
222 do_jump (op0
, if_false_label
, if_true_label
, prob
);
223 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
224 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
225 do_jump_by_parts_equality (op0
, op1
, if_true_label
, if_false_label
,
228 do_compare_and_jump (op0
, op1
, NE
, NE
, if_false_label
, if_true_label
,
234 mode
= TYPE_MODE (TREE_TYPE (op0
));
235 if (GET_MODE_CLASS (mode
) == MODE_INT
236 && ! can_compare_p (LT
, mode
, ccp_jump
))
237 do_jump_by_parts_greater (op0
, op1
, 1, if_false_label
, if_true_label
,
240 do_compare_and_jump (op0
, op1
, LT
, LTU
, if_false_label
, if_true_label
,
245 mode
= TYPE_MODE (TREE_TYPE (op0
));
246 if (GET_MODE_CLASS (mode
) == MODE_INT
247 && ! can_compare_p (LE
, mode
, ccp_jump
))
248 do_jump_by_parts_greater (op0
, op1
, 0, if_true_label
, if_false_label
,
251 do_compare_and_jump (op0
, op1
, LE
, LEU
, if_false_label
, if_true_label
,
256 mode
= TYPE_MODE (TREE_TYPE (op0
));
257 if (GET_MODE_CLASS (mode
) == MODE_INT
258 && ! can_compare_p (GT
, mode
, ccp_jump
))
259 do_jump_by_parts_greater (op0
, op1
, 0, if_false_label
, if_true_label
,
262 do_compare_and_jump (op0
, op1
, GT
, GTU
, if_false_label
, if_true_label
,
267 mode
= TYPE_MODE (TREE_TYPE (op0
));
268 if (GET_MODE_CLASS (mode
) == MODE_INT
269 && ! can_compare_p (GE
, mode
, ccp_jump
))
270 do_jump_by_parts_greater (op0
, op1
, 1, if_true_label
, if_false_label
,
273 do_compare_and_jump (op0
, op1
, GE
, GEU
, if_false_label
, if_true_label
,
278 do_compare_and_jump (op0
, op1
, ORDERED
, ORDERED
,
279 if_false_label
, if_true_label
, prob
);
283 do_compare_and_jump (op0
, op1
, UNORDERED
, UNORDERED
,
284 if_false_label
, if_true_label
, prob
);
288 do_compare_and_jump (op0
, op1
, UNLT
, UNLT
, if_false_label
, if_true_label
,
293 do_compare_and_jump (op0
, op1
, UNLE
, UNLE
, if_false_label
, if_true_label
,
298 do_compare_and_jump (op0
, op1
, UNGT
, UNGT
, if_false_label
, if_true_label
,
303 do_compare_and_jump (op0
, op1
, UNGE
, UNGE
, if_false_label
, if_true_label
,
308 do_compare_and_jump (op0
, op1
, UNEQ
, UNEQ
, if_false_label
, if_true_label
,
313 do_compare_and_jump (op0
, op1
, LTGT
, LTGT
, if_false_label
, if_true_label
,
317 case TRUTH_ANDIF_EXPR
:
318 if (if_false_label
== NULL_RTX
)
320 drop_through_label
= gen_label_rtx ();
321 do_jump (op0
, drop_through_label
, NULL_RTX
, prob
);
322 do_jump (op1
, NULL_RTX
, if_true_label
, prob
);
326 do_jump (op0
, if_false_label
, NULL_RTX
, prob
);
327 do_jump (op1
, if_false_label
, if_true_label
, prob
);
331 case TRUTH_ORIF_EXPR
:
332 if (if_true_label
== NULL_RTX
)
334 drop_through_label
= gen_label_rtx ();
335 do_jump (op0
, NULL_RTX
, drop_through_label
, prob
);
336 do_jump (op1
, if_false_label
, NULL_RTX
, prob
);
340 do_jump (op0
, NULL_RTX
, if_true_label
, prob
);
341 do_jump (op1
, if_false_label
, if_true_label
, prob
);
349 if (drop_through_label
)
351 do_pending_stack_adjust ();
352 emit_label (drop_through_label
);
356 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
357 the result is zero, or IF_TRUE_LABEL if the result is one.
358 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
359 meaning fall through in that case.
361 do_jump always does any pending stack adjust except when it does not
362 actually perform a jump. An example where there is no jump
363 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
365 PROB is probability of jump to if_true_label, or -1 if unknown. */
368 do_jump (tree exp
, rtx if_false_label
, rtx if_true_label
, int prob
)
370 enum tree_code code
= TREE_CODE (exp
);
374 enum machine_mode mode
;
375 rtx drop_through_label
= 0;
383 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
389 /* This is not true with #pragma weak */
391 /* The address of something can never be zero. */
393 emit_jump (if_true_label
);
398 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
399 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
400 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
401 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
404 /* If we are narrowing the operand, we have to do the compare in the
406 if ((TYPE_PRECISION (TREE_TYPE (exp
))
407 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
409 case NON_LVALUE_EXPR
:
414 /* These cannot change zero->nonzero or vice versa. */
415 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
, prob
);
419 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
,
425 rtx label1
= gen_label_rtx ();
426 if (!if_true_label
|| !if_false_label
)
428 drop_through_label
= gen_label_rtx ();
430 if_true_label
= drop_through_label
;
432 if_false_label
= drop_through_label
;
435 do_pending_stack_adjust ();
436 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
, -1);
437 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
, prob
);
439 do_jump (TREE_OPERAND (exp
, 2), if_false_label
, if_true_label
, prob
);
444 /* Lowered by gimplify.c. */
450 case ARRAY_RANGE_REF
:
452 HOST_WIDE_INT bitsize
, bitpos
;
454 enum machine_mode mode
;
459 /* Get description of this reference. We don't actually care
460 about the underlying object here. */
461 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
462 &unsignedp
, &volatilep
, false);
464 type
= lang_hooks
.types
.type_for_size (bitsize
, unsignedp
);
465 if (! SLOW_BYTE_ACCESS
466 && type
!= 0 && bitsize
>= 0
467 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
468 && have_insn_for (COMPARE
, TYPE_MODE (type
)))
470 do_jump (fold_convert (type
, exp
), if_false_label
, if_true_label
,
478 /* Nonzero iff operands of minus differ. */
496 case TRUTH_ANDIF_EXPR
:
497 case TRUTH_ORIF_EXPR
:
499 do_jump_1 (code
, TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
500 if_false_label
, if_true_label
, prob
);
504 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
505 See if the former is preferred for jump tests and restore it
507 if (integer_onep (TREE_OPERAND (exp
, 1)))
509 tree exp0
= TREE_OPERAND (exp
, 0);
510 rtx set_label
, clr_label
;
511 int setclr_prob
= prob
;
513 /* Strip narrowing integral type conversions. */
514 while (CONVERT_EXPR_P (exp0
)
515 && TREE_OPERAND (exp0
, 0) != error_mark_node
516 && TYPE_PRECISION (TREE_TYPE (exp0
))
517 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0
, 0))))
518 exp0
= TREE_OPERAND (exp0
, 0);
520 /* "exp0 ^ 1" inverts the sense of the single bit test. */
521 if (TREE_CODE (exp0
) == BIT_XOR_EXPR
522 && integer_onep (TREE_OPERAND (exp0
, 1)))
524 exp0
= TREE_OPERAND (exp0
, 0);
525 clr_label
= if_true_label
;
526 set_label
= if_false_label
;
527 setclr_prob
= inv (prob
);
531 clr_label
= if_false_label
;
532 set_label
= if_true_label
;
535 if (TREE_CODE (exp0
) == RSHIFT_EXPR
)
537 tree arg
= TREE_OPERAND (exp0
, 0);
538 tree shift
= TREE_OPERAND (exp0
, 1);
539 tree argtype
= TREE_TYPE (arg
);
540 if (TREE_CODE (shift
) == INTEGER_CST
541 && compare_tree_int (shift
, 0) >= 0
542 && compare_tree_int (shift
, HOST_BITS_PER_WIDE_INT
) < 0
543 && prefer_and_bit_test (TYPE_MODE (argtype
),
544 TREE_INT_CST_LOW (shift
)))
546 unsigned HOST_WIDE_INT mask
547 = (unsigned HOST_WIDE_INT
) 1 << TREE_INT_CST_LOW (shift
);
548 do_jump (build2 (BIT_AND_EXPR
, argtype
, arg
,
549 build_int_cstu (argtype
, mask
)),
550 clr_label
, set_label
, setclr_prob
);
556 /* If we are AND'ing with a small constant, do this comparison in the
557 smallest type that fits. If the machine doesn't have comparisons
558 that small, it will be converted back to the wider comparison.
559 This helps if we are testing the sign bit of a narrower object.
560 combine can't do this for us because it can't know whether a
561 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
563 if (! SLOW_BYTE_ACCESS
564 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
565 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
566 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
567 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
568 && (type
= lang_hooks
.types
.type_for_mode (mode
, 1)) != 0
569 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
570 && have_insn_for (COMPARE
, TYPE_MODE (type
)))
572 do_jump (fold_convert (type
, exp
), if_false_label
, if_true_label
,
577 if (TYPE_PRECISION (TREE_TYPE (exp
)) > 1
578 || TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
581 /* Boolean comparisons can be compiled as TRUTH_AND_EXPR. */
584 /* High branch cost, expand as the bitwise AND of the conditions.
585 Do the same if the RHS has side effects, because we're effectively
586 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR. */
587 if (BRANCH_COST (optimize_insn_for_speed_p (),
589 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
591 code
= TRUTH_ANDIF_EXPR
;
596 /* High branch cost, expand as the bitwise OR of the conditions.
597 Do the same if the RHS has side effects, because we're effectively
598 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR. */
599 if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
600 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
602 code
= TRUTH_ORIF_EXPR
;
605 /* Fall through and generate the normal code. */
608 temp
= expand_normal (exp
);
609 do_pending_stack_adjust ();
610 /* The RTL optimizers prefer comparisons against pseudos. */
611 if (GET_CODE (temp
) == SUBREG
)
613 /* Compare promoted variables in their promoted mode. */
614 if (SUBREG_PROMOTED_VAR_P (temp
)
615 && REG_P (XEXP (temp
, 0)))
616 temp
= XEXP (temp
, 0);
618 temp
= copy_to_reg (temp
);
620 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
621 NE
, TYPE_UNSIGNED (TREE_TYPE (exp
)),
622 GET_MODE (temp
), NULL_RTX
,
623 if_false_label
, if_true_label
, prob
);
626 if (drop_through_label
)
628 do_pending_stack_adjust ();
629 emit_label (drop_through_label
);
633 /* Compare OP0 with OP1, word at a time, in mode MODE.
634 UNSIGNEDP says to do unsigned comparison.
635 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
638 do_jump_by_parts_greater_rtx (enum machine_mode mode
, int unsignedp
, rtx op0
,
639 rtx op1
, rtx if_false_label
, rtx if_true_label
,
642 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
643 rtx drop_through_label
= 0;
644 bool drop_through_if_true
= false, drop_through_if_false
= false;
645 enum rtx_code code
= GT
;
648 if (! if_true_label
|| ! if_false_label
)
649 drop_through_label
= gen_label_rtx ();
652 if_true_label
= drop_through_label
;
653 drop_through_if_true
= true;
655 if (! if_false_label
)
657 if_false_label
= drop_through_label
;
658 drop_through_if_false
= true;
661 /* Deal with the special case 0 > x: only one comparison is necessary and
662 we reverse it to avoid jumping to the drop-through label. */
663 if (op0
== const0_rtx
&& drop_through_if_true
&& !drop_through_if_false
)
666 if_true_label
= if_false_label
;
667 if_false_label
= drop_through_label
;
668 drop_through_if_true
= false;
669 drop_through_if_false
= true;
672 /* Compare a word at a time, high order first. */
673 for (i
= 0; i
< nwords
; i
++)
675 rtx op0_word
, op1_word
;
677 if (WORDS_BIG_ENDIAN
)
679 op0_word
= operand_subword_force (op0
, i
, mode
);
680 op1_word
= operand_subword_force (op1
, i
, mode
);
684 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
685 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
688 /* All but high-order word must be compared as unsigned. */
689 do_compare_rtx_and_jump (op0_word
, op1_word
, code
, (unsignedp
|| i
> 0),
690 word_mode
, NULL_RTX
, NULL_RTX
, if_true_label
,
693 /* Emit only one comparison for 0. Do not emit the last cond jump. */
694 if (op0
== const0_rtx
|| i
== nwords
- 1)
697 /* Consider lower words only if these are equal. */
698 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
699 NULL_RTX
, NULL_RTX
, if_false_label
, inv (prob
));
702 if (!drop_through_if_false
)
703 emit_jump (if_false_label
);
704 if (drop_through_label
)
705 emit_label (drop_through_label
);
708 /* Given a comparison expression EXP for values too wide to be compared
709 with one insn, test the comparison and jump to the appropriate label.
710 The code of EXP is ignored; we always test GT if SWAP is 0,
711 and LT if SWAP is 1. */
714 do_jump_by_parts_greater (tree treeop0
, tree treeop1
, int swap
,
715 rtx if_false_label
, rtx if_true_label
, int prob
)
717 rtx op0
= expand_normal (swap
? treeop1
: treeop0
);
718 rtx op1
= expand_normal (swap
? treeop0
: treeop1
);
719 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (treeop0
));
720 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (treeop0
));
722 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
,
723 if_true_label
, prob
);
726 /* Jump according to whether OP0 is 0. We assume that OP0 has an integer
727 mode, MODE, that is too wide for the available compare insns. Either
728 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
729 to indicate drop through. */
732 do_jump_by_parts_zero_rtx (enum machine_mode mode
, rtx op0
,
733 rtx if_false_label
, rtx if_true_label
, int prob
)
735 int nwords
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
738 rtx drop_through_label
= 0;
740 /* The fastest way of doing this comparison on almost any machine is to
741 "or" all the words and compare the result. If all have to be loaded
742 from memory and this is a very wide item, it's possible this may
743 be slower, but that's highly unlikely. */
745 part
= gen_reg_rtx (word_mode
);
746 emit_move_insn (part
, operand_subword_force (op0
, 0, mode
));
747 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
748 part
= expand_binop (word_mode
, ior_optab
, part
,
749 operand_subword_force (op0
, i
, mode
),
750 part
, 1, OPTAB_WIDEN
);
754 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
755 NULL_RTX
, if_false_label
, if_true_label
, prob
);
759 /* If we couldn't do the "or" simply, do this with a series of compares. */
760 if (! if_false_label
)
761 drop_through_label
= if_false_label
= gen_label_rtx ();
763 for (i
= 0; i
< nwords
; i
++)
764 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
765 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
,
766 if_false_label
, NULL_RTX
, prob
);
769 emit_jump (if_true_label
);
771 if (drop_through_label
)
772 emit_label (drop_through_label
);
775 /* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
776 where MODE is an integer mode too wide to be compared with one insn.
777 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
778 to indicate drop through. */
781 do_jump_by_parts_equality_rtx (enum machine_mode mode
, rtx op0
, rtx op1
,
782 rtx if_false_label
, rtx if_true_label
, int prob
)
784 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
785 rtx drop_through_label
= 0;
788 if (op1
== const0_rtx
)
790 do_jump_by_parts_zero_rtx (mode
, op0
, if_false_label
, if_true_label
,
794 else if (op0
== const0_rtx
)
796 do_jump_by_parts_zero_rtx (mode
, op1
, if_false_label
, if_true_label
,
801 if (! if_false_label
)
802 drop_through_label
= if_false_label
= gen_label_rtx ();
804 for (i
= 0; i
< nwords
; i
++)
805 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
806 operand_subword_force (op1
, i
, mode
),
807 EQ
, 0, word_mode
, NULL_RTX
,
808 if_false_label
, NULL_RTX
, prob
);
811 emit_jump (if_true_label
);
812 if (drop_through_label
)
813 emit_label (drop_through_label
);
816 /* Given an EQ_EXPR expression EXP for values too wide to be compared
817 with one insn, test the comparison and jump to the appropriate label. */
820 do_jump_by_parts_equality (tree treeop0
, tree treeop1
, rtx if_false_label
,
821 rtx if_true_label
, int prob
)
823 rtx op0
= expand_normal (treeop0
);
824 rtx op1
= expand_normal (treeop1
);
825 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (treeop0
));
826 do_jump_by_parts_equality_rtx (mode
, op0
, op1
, if_false_label
,
827 if_true_label
, prob
);
830 /* Split a comparison into two others, the second of which has the other
831 "orderedness". The first is always ORDERED or UNORDERED if MODE
832 does not honor NaNs (which means that it can be skipped in that case;
833 see do_compare_rtx_and_jump).
835 The two conditions are written in *CODE1 and *CODE2. Return true if
836 the conditions must be ANDed, false if they must be ORed. */
839 split_comparison (enum rtx_code code
, enum machine_mode mode
,
840 enum rtx_code
*code1
, enum rtx_code
*code2
)
889 /* Do not turn a trapping comparison into a non-trapping one. */
890 if (HONOR_SNANS (mode
))
908 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
909 The decision as to signed or unsigned comparison must be made by the caller.
911 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
915 do_compare_rtx_and_jump (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
916 enum machine_mode mode
, rtx size
, rtx if_false_label
,
917 rtx if_true_label
, int prob
)
920 rtx dummy_label
= NULL_RTX
;
923 /* Reverse the comparison if that is safe and we want to jump if it is
924 false. Also convert to the reverse comparison if the target can
927 || ! can_compare_p (code
, mode
, ccp_jump
))
928 && (! FLOAT_MODE_P (mode
)
929 || code
== ORDERED
|| code
== UNORDERED
930 || (! HONOR_NANS (mode
) && (code
== LTGT
|| code
== UNEQ
))
931 || (! HONOR_SNANS (mode
) && (code
== EQ
|| code
== NE
))))
934 if (FLOAT_MODE_P (mode
))
935 rcode
= reverse_condition_maybe_unordered (code
);
937 rcode
= reverse_condition (code
);
939 /* Canonicalize to UNORDERED for the libcall. */
940 if (can_compare_p (rcode
, mode
, ccp_jump
)
941 || (code
== ORDERED
&& ! can_compare_p (ORDERED
, mode
, ccp_jump
)))
944 if_true_label
= if_false_label
;
945 if_false_label
= tem
;
951 /* If one operand is constant, make it the second one. Only do this
952 if the other operand is not constant as well. */
954 if (swap_commutative_operands_p (op0
, op1
))
959 code
= swap_condition (code
);
962 do_pending_stack_adjust ();
964 code
= unsignedp
? unsigned_condition (code
) : code
;
965 if (0 != (tem
= simplify_relational_operation (code
, mode
, VOIDmode
,
968 if (CONSTANT_P (tem
))
970 rtx label
= (tem
== const0_rtx
|| tem
== CONST0_RTX (mode
))
971 ? if_false_label
: if_true_label
;
977 code
= GET_CODE (tem
);
978 mode
= GET_MODE (tem
);
981 unsignedp
= (code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
);
985 dummy_label
= if_true_label
= gen_label_rtx ();
987 if (GET_MODE_CLASS (mode
) == MODE_INT
988 && ! can_compare_p (code
, mode
, ccp_jump
))
993 do_jump_by_parts_greater_rtx (mode
, 1, op1
, op0
,
994 if_false_label
, if_true_label
, prob
);
998 do_jump_by_parts_greater_rtx (mode
, 1, op0
, op1
,
999 if_true_label
, if_false_label
,
1004 do_jump_by_parts_greater_rtx (mode
, 1, op0
, op1
,
1005 if_false_label
, if_true_label
, prob
);
1009 do_jump_by_parts_greater_rtx (mode
, 1, op1
, op0
,
1010 if_true_label
, if_false_label
,
1015 do_jump_by_parts_greater_rtx (mode
, 0, op1
, op0
,
1016 if_false_label
, if_true_label
, prob
);
1020 do_jump_by_parts_greater_rtx (mode
, 0, op0
, op1
,
1021 if_true_label
, if_false_label
,
1026 do_jump_by_parts_greater_rtx (mode
, 0, op0
, op1
,
1027 if_false_label
, if_true_label
, prob
);
1031 do_jump_by_parts_greater_rtx (mode
, 0, op1
, op0
,
1032 if_true_label
, if_false_label
,
1037 do_jump_by_parts_equality_rtx (mode
, op0
, op1
, if_false_label
,
1038 if_true_label
, prob
);
1042 do_jump_by_parts_equality_rtx (mode
, op0
, op1
, if_true_label
,
1043 if_false_label
, inv (prob
));
1052 if (SCALAR_FLOAT_MODE_P (mode
)
1053 && ! can_compare_p (code
, mode
, ccp_jump
)
1054 && can_compare_p (swap_condition (code
), mode
, ccp_jump
))
1057 code
= swap_condition (code
);
1063 else if (SCALAR_FLOAT_MODE_P (mode
)
1064 && ! can_compare_p (code
, mode
, ccp_jump
)
1066 /* Never split ORDERED and UNORDERED. These must be implemented. */
1067 && (code
!= ORDERED
&& code
!= UNORDERED
)
1069 /* Split a floating-point comparison if we can jump on other
1071 && (have_insn_for (COMPARE
, mode
)
1073 /* ... or if there is no libcall for it. */
1074 || code_to_optab
[code
] == NULL
))
1076 enum rtx_code first_code
;
1077 bool and_them
= split_comparison (code
, mode
, &first_code
, &code
);
1079 /* If there are no NaNs, the first comparison should always fall
1081 if (!HONOR_NANS (mode
))
1082 gcc_assert (first_code
== (and_them
? ORDERED
: UNORDERED
));
1089 /* If we only jump if true, just bypass the second jump. */
1090 if (! if_false_label
)
1093 dummy_label
= gen_label_rtx ();
1094 dest_label
= dummy_label
;
1097 dest_label
= if_false_label
;
1098 do_compare_rtx_and_jump (op0
, op1
, first_code
, unsignedp
, mode
,
1099 size
, dest_label
, NULL_RTX
, prob
);
1102 do_compare_rtx_and_jump (op0
, op1
, first_code
, unsignedp
, mode
,
1103 size
, NULL_RTX
, if_true_label
, prob
);
1107 last
= get_last_insn ();
1108 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
,
1110 if (prob
!= -1 && profile_status
!= PROFILE_ABSENT
)
1112 for (last
= NEXT_INSN (last
);
1113 last
&& NEXT_INSN (last
);
1114 last
= NEXT_INSN (last
))
1120 || !any_condjump_p (last
))
1123 fprintf (dump_file
, "Failed to add probability note\n");
1127 gcc_assert (!find_reg_note (last
, REG_BR_PROB
, 0));
1128 add_reg_note (last
, REG_BR_PROB
, GEN_INT (prob
));
1134 emit_jump (if_false_label
);
1136 emit_label (dummy_label
);
1139 /* Generate code for a comparison expression EXP (including code to compute
1140 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
1141 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
1142 generated code will drop through.
1143 SIGNED_CODE should be the rtx operation for this comparison for
1144 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
1146 We force a stack adjustment unless there are currently
1147 things pushed on the stack that aren't yet used. */
1150 do_compare_and_jump (tree treeop0
, tree treeop1
, enum rtx_code signed_code
,
1151 enum rtx_code unsigned_code
, rtx if_false_label
,
1152 rtx if_true_label
, int prob
)
1156 enum machine_mode mode
;
1160 /* Don't crash if the comparison was erroneous. */
1161 op0
= expand_normal (treeop0
);
1162 if (TREE_CODE (treeop0
) == ERROR_MARK
)
1165 op1
= expand_normal (treeop1
);
1166 if (TREE_CODE (treeop1
) == ERROR_MARK
)
1169 type
= TREE_TYPE (treeop0
);
1170 mode
= TYPE_MODE (type
);
1171 if (TREE_CODE (treeop0
) == INTEGER_CST
1172 && (TREE_CODE (treeop1
) != INTEGER_CST
1173 || (GET_MODE_BITSIZE (mode
)
1174 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1
))))))
1176 /* op0 might have been replaced by promoted constant, in which
1177 case the type of second argument should be used. */
1178 type
= TREE_TYPE (treeop1
);
1179 mode
= TYPE_MODE (type
);
1181 unsignedp
= TYPE_UNSIGNED (type
);
1182 code
= unsignedp
? unsigned_code
: signed_code
;
1184 #ifdef HAVE_canonicalize_funcptr_for_compare
1185 /* If function pointers need to be "canonicalized" before they can
1186 be reliably compared, then canonicalize them.
1187 Only do this if *both* sides of the comparison are function pointers.
1188 If one side isn't, we want a noncanonicalized comparison. See PR
1189 middle-end/17564. */
1190 if (HAVE_canonicalize_funcptr_for_compare
1191 && TREE_CODE (TREE_TYPE (treeop0
)) == POINTER_TYPE
1192 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0
)))
1194 && TREE_CODE (TREE_TYPE (treeop1
)) == POINTER_TYPE
1195 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1
)))
1198 rtx new_op0
= gen_reg_rtx (mode
);
1199 rtx new_op1
= gen_reg_rtx (mode
);
1201 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
1204 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
1209 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
1211 ? expr_size (treeop0
) : NULL_RTX
),
1212 if_false_label
, if_true_label
, prob
);
1215 #include "gt-dojump.h"