/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
#include "basic-block.h"
#include "tm_p.h"
static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx, int);
/* Invert probability if there is any.  -1 stands for unknown.  */

static inline int
inv (int prob)
{
  return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}
/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */

void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}
/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label, int prob)
{
  do_jump (exp, label, NULL_RTX, inv (prob));
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, label, NULL_RTX, inv (prob));
}
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label, int prob)
{
  do_jump (exp, NULL_RTX, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, NULL_RTX, label, prob);
}
/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */
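/* The result of this cost comparison is consulted by the BIT_AND_EXPR case
   of do_jump below, which may undo fold_single_bit_test's (X >> C) & 1
   rewrite when the X & (1 << C) form is cheaper to branch on.  */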
static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  bool speed_p;

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1)
    = immed_double_int_const (double_int_zero.set_bit (bitnum), mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, IF_THEN_ELSE, 0, speed_p)
          <= rtx_cost (shift_test, IF_THEN_ELSE, 0, speed_p));
}
/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1 .  IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
           rtx if_false_label, rtx if_true_label, int prob)
{
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
                                     prob);
        else
          do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
                               prob);
        break;
      }
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
                                     inv (prob));
        else
          do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
                               prob);
        break;
      }
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
                             prob);
      break;
    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
                             prob);
      break;
    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
                             prob);
      break;
    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
                             prob);
      break;
    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
                           prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
                           prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
                           prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
                           prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
                           prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
                           prob);
      break;
    case TRUTH_ANDIF_EXPR:
      {
        /* Spread the probability that the expression is false evenly between
           the two conditions.  So the first condition is false half the total
           probability of being false.  The second condition is false the other
           half of the total probability of being false, so its jump has a false
           probability of half the total, relative to the probability we
           reached it (i.e. the first condition was true).  */
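        /* Worked example (illustrative only, assuming REG_BR_PROB_BASE is
           10000): if PROB is 8000, the whole expression is false 2000 times
           in 10000; each condition then gets a false probability of 1000,
           and the second jump's probability is rescaled by
           GCOV_COMPUTE_SCALE against the roughly 9000 cases in which the
           first condition was true.  */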
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            int false_prob = inv (prob);
            int op0_false_prob = false_prob / 2;
            int op1_false_prob = GCOV_COMPUTE_SCALE ((false_prob / 2),
                                                     inv (op0_false_prob));
            /* Get the probability that each jump below is true.  */
            op0_prob = inv (op0_false_prob);
            op1_prob = inv (op1_false_prob);
          }
        if (if_false_label == NULL_RTX)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, drop_through_label, NULL_RTX, op0_prob);
            do_jump (op1, NULL_RTX, if_true_label, op1_prob);
          }
        else
          {
            do_jump (op0, if_false_label, NULL_RTX, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }
    case TRUTH_ORIF_EXPR:
      {
        /* Spread the probability evenly between the two conditions.  So
           the first condition has half the total probability of being true.
           The second condition has the other half of the total probability,
           so its jump has a probability of half the total, relative to
           the probability we reached it (i.e. the first condition was false).  */
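        /* Illustrative sketch (assuming REG_BR_PROB_BASE is 10000): if PROB
           is 6000, the first jump is taken with probability 3000; the second
           jump's 3000 is rescaled against the inv (3000) == 7000 cases that
           fall through to it.  */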
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            op0_prob = prob / 2;
            op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
          }
        if (if_true_label == NULL_RTX)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, NULL_RTX, drop_through_label, op0_prob);
            do_jump (op1, if_false_label, NULL_RTX, op1_prob);
          }
        else
          {
            do_jump (op0, NULL_RTX, if_true_label, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }
  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;
  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif
    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               inv (prob));
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;
    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
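      /* Illustrative sketch: for a source test like (x & 8) != 0,
         fold_single_bit_test may have produced (x >> 3) & 1; when
         prefer_and_bit_test says the masked form is cheaper for this mode,
         the code below rebuilds the BIT_AND_EXPR with mask 1 << 3 before
         emitting the jump.  */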
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;
          int setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = inv (prob);
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
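      /* Illustrative sketch: for a test like (x & 0x80) != 0 on a 32-bit
         int, only the low byte matters, so the comparison can be narrowed
         to QImode when the target has a byte-wide compare insn.  */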
      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */
    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
                       false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
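/* Illustrative sketch: for a DImode comparison on a 32-bit target this emits
   a compare of the high words first, and only compares the low words, always
   as unsigned values, when the high words turn out to be equal.  */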
static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label,
                              int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }
  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
    }
  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
                               word_mode, NULL_RTX, NULL_RTX, if_true_label,
                               prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
        break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label, inv (prob));
    }

  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
                          rtx if_false_label, rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label, prob);
}
/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */
static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;
  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
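  /* Illustrative sketch: for a TImode value on a 64-bit target this emits
     roughly "part = w0 | w1; if (part == 0) goto if_false;" rather than a
     separate compare-and-branch for each word.  */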
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */
static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
                                 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
                                 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1, rtx if_false_label,
                           rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));

  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label, prob);
}
/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */
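/* For example (a sketch, not taken verbatim from the elided switch body
   below): LT is equivalent to "ORDERED && UNLT", so *CODE1 gets ORDERED,
   *CODE2 gets UNLT and the function returns true; UNLE is equivalent to
   "UNORDERED || LE", so those two conditions would instead be ORed.  */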
static bool
split_comparison (enum rtx_code code, enum machine_mode mode,
                  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
    case LTGT:
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
        {
          *code1 = LT;
          *code2 = GT;
          return false;
        }
      else
        {
          *code1 = ORDERED;
          *code2 = NE;
          return true;
        }

    default:
      gcc_unreachable ();
    }
}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL_RTX;
  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
          code = rcode;
          prob = inv (prob);
        }
    }
  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();
  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();
  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          rtx tmp;
          code = swap_condition (code);
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
          else
            {
              if (and_them)
                {
                  rtx dest_label;

                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL_RTX,
                                           prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                         size, NULL_RTX, if_true_label, prob);
            }
        }
      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;
#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0)))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1)))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
                           if_false_label, if_true_label, prob);
}

#include "gt-dojump.h"