/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "stor-layout.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "langhooks.h"
#include "basic-block.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx, int);

/* Invert probability if there is any.  -1 stands for unknown.  */

static inline int
inv (int prob)
{
  return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
}

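/* For example, with REG_BR_PROB_BASE of 10000, inv (2500) is 7500: a jump
   taken 25% of the time is not taken 75% of the time; inv (-1) stays -1
   (unknown).  */
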
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */

void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label, int prob)
{
  do_jump (exp, label, NULL_RTX, inv (prob));
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, label, NULL_RTX, inv (prob));
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label, int prob)
{
  do_jump (exp, NULL_RTX, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, NULL_RTX, label, prob);
}

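/* The jumpif/jumpifnot pairs above are thin wrappers around do_jump and
   do_jump_1: the "not" variants simply pass LABEL as the false label and
   invert PROB, because PROB is always the probability of jumping to the
   true label.  */
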
/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  bool speed_p;

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1)
    = immed_double_int_const (double_int_zero.set_bit (bitnum), mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, IF_THEN_ELSE, 0, speed_p)
          <= rtx_cost (shift_test, IF_THEN_ELSE, 0, speed_p));
}

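/* Note that and_reg, and_test and shift_test above are GC-protected
   (GTY-marked) statics: the scratch rtxes are built once and then only have
   their mode and bit number updated on later queries, instead of being
   reallocated on every call.  */
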
/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1 .  IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
           rtx if_false_label, rtx if_true_label, int prob)
{
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
                                     prob);
        else
          do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
                               prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
                                     inv (prob));
        else
          do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
                               prob);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
                             prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
                             prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
                             prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
                             prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
                           prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
                           prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
                           prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
                           prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
                           prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
                           prob);
      break;

    case TRUTH_ANDIF_EXPR:
      {
        /* Spread the probability that the expression is false evenly between
           the two conditions.  So the first condition is false half the total
           probability of being false.  The second condition is false the other
           half of the total probability of being false, so its jump has a false
           probability of half the total, relative to the probability we
           reached it (i.e. the first condition was true).  */
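        /* A worked example (assuming REG_BR_PROB_BASE is 10000): if PROB is
           8000, the whole conjunction is false 2000/10000 of the time.  Each
           condition gets a false probability of 1000, so op0's jump is true
           with probability 9000, and op1's false probability is rescaled to
           about 1111 relative to reaching it, giving op1 a true probability
           of about 8889.  */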
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            int false_prob = inv (prob);
            int op0_false_prob = false_prob / 2;
            int op1_false_prob = GCOV_COMPUTE_SCALE ((false_prob / 2),
                                                     inv (op0_false_prob));
            /* Get the probability that each jump below is true.  */
            op0_prob = inv (op0_false_prob);
            op1_prob = inv (op1_false_prob);
          }
        if (if_false_label == NULL_RTX)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, drop_through_label, NULL_RTX, op0_prob);
            do_jump (op1, NULL_RTX, if_true_label, op1_prob);
          }
        else
          {
            do_jump (op0, if_false_label, NULL_RTX, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    case TRUTH_ORIF_EXPR:
      {
        /* Spread the probability evenly between the two conditions.  So
           the first condition has half the total probability of being true.
           The second condition has the other half of the total probability,
           so its jump has a probability of half the total, relative to
           the probability we reached it (i.e. the first condition was false).  */
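        /* A worked example (assuming REG_BR_PROB_BASE is 10000): if PROB is
           6000, each condition gets half, so op0 jumps true with probability
           3000; the remaining 3000 is rescaled against the 7000 cases that
           reach op1, giving op1 a true probability of about 4286.  */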
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            op0_prob = prob / 2;
            op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
          }
        if (if_true_label == NULL_RTX)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, NULL_RTX, drop_through_label, op0_prob);
            do_jump (op1, if_false_label, NULL_RTX, op1_prob);
          }
        else
          {
            do_jump (op0, NULL_RTX, if_true_label, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               inv (prob));
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;
          int setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = inv (prob);
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
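      /* For example, testing "x & 0x80" where x has a 32-bit type involves
         only the low byte, so the test can be narrowed to a QImode comparison
         of that byte's sign bit when the target has a QImode compare insn;
         the exact mode is chosen by mode_for_size below.  */
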
      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
                       false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

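/* The loop below works from the most significant word down: a word that
   compares strictly greater decides the whole comparison at once, a word
   that compares unequal decides it the other way, and only equal words fall
   through to the next, less significant pair.  Every word except the
   high-order one is compared unsigned.  */
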
static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label,
                              int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }

  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
    }

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
                               word_mode, NULL_RTX, NULL_RTX, if_true_label,
                               prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
        break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label, inv (prob));
    }

  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

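/* For example, do_jump_1 handles LT_EXPR by calling this with SWAP == 1,
   which expands the operands in the opposite order and so reduces
   "op0 < op1" to the "op1 > op0" test done by do_jump_by_parts_greater_rtx.  */
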
static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
                          rtx if_false_label, rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label, prob);
}

/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
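  /* For instance, testing whether a DImode value is zero on a 32-bit target
     ORs its two SImode words into a single word_mode register and compares
     that one result against zero, instead of emitting two separate compare
     and jump sequences.  */
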
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
                                 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
                                 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1, rtx if_false_label,
                           rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label, prob);
}

/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */

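/* For example, LT splits into ORDERED and UNLT, which must be ANDed
   (op0 < op1 requires the operands to be ordered), while UNLT splits into
   UNORDERED and LT, which must be ORed.  */
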
bool
split_comparison (enum rtx_code code, enum machine_mode mode,
                  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL_RTX;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          rtx tmp;
          code = swap_condition (code);
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
          else
            {
              if (and_them)
                {
                  rtx dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL_RTX,
                                           prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                         size, NULL_RTX, if_true_label, prob);
            }
        }

      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0)))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1)))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
                           if_false_label, if_true_label, prob);
}

#include "gt-dojump.h"