/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
#include "basic-block.h"
#include "tm_p.h"
38 static bool prefer_and_bit_test (enum machine_mode
, int);
39 static void do_jump_by_parts_greater (tree
, tree
, int, rtx
, rtx
, int);
40 static void do_jump_by_parts_equality (tree
, tree
, rtx
, rtx
, int);
41 static void do_compare_and_jump (tree
, tree
, enum rtx_code
, enum rtx_code
, rtx
,
44 /* Invert probability if there is any. -1 stands for unknown. */
49 return prob
== -1 ? -1 : REG_BR_PROB_BASE
- prob
;
52 /* At the start of a function, record that we have no previously-pushed
53 arguments waiting to be popped. */
56 init_pending_stack_adjust (void)
58 pending_stack_adjust
= 0;
61 /* Discard any pending stack adjustment. This avoid relying on the
62 RTL optimizers to remove useless adjustments when we know the
63 stack pointer value is dead. */
65 discard_pending_stack_adjust (void)
67 stack_pointer_delta
-= pending_stack_adjust
;
68 pending_stack_adjust
= 0;
71 /* When exiting from function, if safe, clear out any pending stack adjust
72 so the adjustment won't get done.
74 Note, if the current function calls alloca, then it must have a
75 frame pointer regardless of the value of flag_omit_frame_pointer. */
78 clear_pending_stack_adjust (void)
81 && (! flag_omit_frame_pointer
|| cfun
->calls_alloca
)
83 discard_pending_stack_adjust ();
86 /* Pop any previously-pushed arguments that have not been popped yet. */
89 do_pending_stack_adjust (void)
91 if (inhibit_defer_pop
== 0)
93 if (pending_stack_adjust
!= 0)
94 adjust_stack (GEN_INT (pending_stack_adjust
));
95 pending_stack_adjust
= 0;
99 /* Expand conditional expressions. */
101 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
102 LABEL is an rtx of code CODE_LABEL, in this function and all the
106 jumpifnot (tree exp
, rtx label
, int prob
)
108 do_jump (exp
, label
, NULL_RTX
, inv (prob
));
112 jumpifnot_1 (enum tree_code code
, tree op0
, tree op1
, rtx label
, int prob
)
114 do_jump_1 (code
, op0
, op1
, label
, NULL_RTX
, inv (prob
));
117 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
120 jumpif (tree exp
, rtx label
, int prob
)
122 do_jump (exp
, NULL_RTX
, label
, prob
);
126 jumpif_1 (enum tree_code code
, tree op0
, tree op1
, rtx label
, int prob
)
128 do_jump_1 (code
, op0
, op1
, NULL_RTX
, label
, prob
);
131 /* Used internally by prefer_and_bit_test. */
133 static GTY(()) rtx and_reg
;
134 static GTY(()) rtx and_test
;
135 static GTY(()) rtx shift_test
;
137 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
138 where X is an arbitrary register of mode MODE. Return true if the former
142 prefer_and_bit_test (enum machine_mode mode
, int bitnum
)
148 /* Set up rtxes for the two variations. Use NULL as a placeholder
149 for the BITNUM-based constants. */
150 and_reg
= gen_rtx_REG (mode
, FIRST_PSEUDO_REGISTER
);
151 and_test
= gen_rtx_AND (mode
, and_reg
, NULL
);
152 shift_test
= gen_rtx_AND (mode
, gen_rtx_ASHIFTRT (mode
, and_reg
, NULL
),
157 /* Change the mode of the previously-created rtxes. */
158 PUT_MODE (and_reg
, mode
);
159 PUT_MODE (and_test
, mode
);
160 PUT_MODE (shift_test
, mode
);
161 PUT_MODE (XEXP (shift_test
, 0), mode
);
164 /* Fill in the integers. */
166 = immed_double_int_const (double_int_zero
.set_bit (bitnum
), mode
);
167 XEXP (XEXP (shift_test
, 0), 1) = GEN_INT (bitnum
);
169 speed_p
= optimize_insn_for_speed_p ();
170 return (rtx_cost (and_test
, IF_THEN_ELSE
, 0, speed_p
)
171 <= rtx_cost (shift_test
, IF_THEN_ELSE
, 0, speed_p
));
174 /* Subroutine of do_jump, dealing with exploded comparisons of the type
175 OP0 CODE OP1 . IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
176 PROB is probability of jump to if_true_label, or -1 if unknown. */
179 do_jump_1 (enum tree_code code
, tree op0
, tree op1
,
180 rtx if_false_label
, rtx if_true_label
, int prob
)
182 enum machine_mode mode
;
183 rtx drop_through_label
= 0;
189 tree inner_type
= TREE_TYPE (op0
);
191 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
192 != MODE_COMPLEX_FLOAT
);
193 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
194 != MODE_COMPLEX_INT
);
196 if (integer_zerop (op1
))
197 do_jump (op0
, if_true_label
, if_false_label
, inv (prob
));
198 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
199 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
200 do_jump_by_parts_equality (op0
, op1
, if_false_label
, if_true_label
,
203 do_compare_and_jump (op0
, op1
, EQ
, EQ
, if_false_label
, if_true_label
,
210 tree inner_type
= TREE_TYPE (op0
);
212 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
213 != MODE_COMPLEX_FLOAT
);
214 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
215 != MODE_COMPLEX_INT
);
217 if (integer_zerop (op1
))
218 do_jump (op0
, if_false_label
, if_true_label
, prob
);
219 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
220 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
221 do_jump_by_parts_equality (op0
, op1
, if_true_label
, if_false_label
,
224 do_compare_and_jump (op0
, op1
, NE
, NE
, if_false_label
, if_true_label
,
230 mode
= TYPE_MODE (TREE_TYPE (op0
));
231 if (GET_MODE_CLASS (mode
) == MODE_INT
232 && ! can_compare_p (LT
, mode
, ccp_jump
))
233 do_jump_by_parts_greater (op0
, op1
, 1, if_false_label
, if_true_label
,
236 do_compare_and_jump (op0
, op1
, LT
, LTU
, if_false_label
, if_true_label
,
241 mode
= TYPE_MODE (TREE_TYPE (op0
));
242 if (GET_MODE_CLASS (mode
) == MODE_INT
243 && ! can_compare_p (LE
, mode
, ccp_jump
))
244 do_jump_by_parts_greater (op0
, op1
, 0, if_true_label
, if_false_label
,
247 do_compare_and_jump (op0
, op1
, LE
, LEU
, if_false_label
, if_true_label
,
252 mode
= TYPE_MODE (TREE_TYPE (op0
));
253 if (GET_MODE_CLASS (mode
) == MODE_INT
254 && ! can_compare_p (GT
, mode
, ccp_jump
))
255 do_jump_by_parts_greater (op0
, op1
, 0, if_false_label
, if_true_label
,
258 do_compare_and_jump (op0
, op1
, GT
, GTU
, if_false_label
, if_true_label
,
263 mode
= TYPE_MODE (TREE_TYPE (op0
));
264 if (GET_MODE_CLASS (mode
) == MODE_INT
265 && ! can_compare_p (GE
, mode
, ccp_jump
))
266 do_jump_by_parts_greater (op0
, op1
, 1, if_true_label
, if_false_label
,
269 do_compare_and_jump (op0
, op1
, GE
, GEU
, if_false_label
, if_true_label
,
274 do_compare_and_jump (op0
, op1
, ORDERED
, ORDERED
,
275 if_false_label
, if_true_label
, prob
);
279 do_compare_and_jump (op0
, op1
, UNORDERED
, UNORDERED
,
280 if_false_label
, if_true_label
, prob
);
284 do_compare_and_jump (op0
, op1
, UNLT
, UNLT
, if_false_label
, if_true_label
,
289 do_compare_and_jump (op0
, op1
, UNLE
, UNLE
, if_false_label
, if_true_label
,
294 do_compare_and_jump (op0
, op1
, UNGT
, UNGT
, if_false_label
, if_true_label
,
299 do_compare_and_jump (op0
, op1
, UNGE
, UNGE
, if_false_label
, if_true_label
,
304 do_compare_and_jump (op0
, op1
, UNEQ
, UNEQ
, if_false_label
, if_true_label
,
309 do_compare_and_jump (op0
, op1
, LTGT
, LTGT
, if_false_label
, if_true_label
,
313 case TRUTH_ANDIF_EXPR
:
314 if (if_false_label
== NULL_RTX
)
316 drop_through_label
= gen_label_rtx ();
317 do_jump (op0
, drop_through_label
, NULL_RTX
, prob
);
318 do_jump (op1
, NULL_RTX
, if_true_label
, prob
);
322 do_jump (op0
, if_false_label
, NULL_RTX
, prob
);
323 do_jump (op1
, if_false_label
, if_true_label
, prob
);
327 case TRUTH_ORIF_EXPR
:
328 if (if_true_label
== NULL_RTX
)
330 drop_through_label
= gen_label_rtx ();
331 do_jump (op0
, NULL_RTX
, drop_through_label
, prob
);
332 do_jump (op1
, if_false_label
, NULL_RTX
, prob
);
336 do_jump (op0
, NULL_RTX
, if_true_label
, prob
);
337 do_jump (op1
, if_false_label
, if_true_label
, prob
);
345 if (drop_through_label
)
347 do_pending_stack_adjust ();
348 emit_label (drop_through_label
);
352 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
353 the result is zero, or IF_TRUE_LABEL if the result is one.
354 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
355 meaning fall through in that case.
357 do_jump always does any pending stack adjust except when it does not
358 actually perform a jump. An example where there is no jump
359 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
361 PROB is probability of jump to if_true_label, or -1 if unknown. */
364 do_jump (tree exp
, rtx if_false_label
, rtx if_true_label
, int prob
)
366 enum tree_code code
= TREE_CODE (exp
);
370 enum machine_mode mode
;
371 rtx drop_through_label
= 0;
379 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
385 /* This is not true with #pragma weak */
387 /* The address of something can never be zero. */
389 emit_jump (if_true_label
);
394 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
395 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
396 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
397 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
400 /* If we are narrowing the operand, we have to do the compare in the
402 if ((TYPE_PRECISION (TREE_TYPE (exp
))
403 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
405 case NON_LVALUE_EXPR
:
410 /* These cannot change zero->nonzero or vice versa. */
411 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
, prob
);
415 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
,
421 rtx label1
= gen_label_rtx ();
422 if (!if_true_label
|| !if_false_label
)
424 drop_through_label
= gen_label_rtx ();
426 if_true_label
= drop_through_label
;
428 if_false_label
= drop_through_label
;
431 do_pending_stack_adjust ();
432 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
, -1);
433 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
, prob
);
435 do_jump (TREE_OPERAND (exp
, 2), if_false_label
, if_true_label
, prob
);
440 /* Lowered by gimplify.c. */
444 /* Nonzero iff operands of minus differ. */
462 case TRUTH_ANDIF_EXPR
:
463 case TRUTH_ORIF_EXPR
:
465 do_jump_1 (code
, TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
466 if_false_label
, if_true_label
, prob
);
470 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
471 See if the former is preferred for jump tests and restore it
473 if (integer_onep (TREE_OPERAND (exp
, 1)))
475 tree exp0
= TREE_OPERAND (exp
, 0);
476 rtx set_label
, clr_label
;
477 int setclr_prob
= prob
;
479 /* Strip narrowing integral type conversions. */
480 while (CONVERT_EXPR_P (exp0
)
481 && TREE_OPERAND (exp0
, 0) != error_mark_node
482 && TYPE_PRECISION (TREE_TYPE (exp0
))
483 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0
, 0))))
484 exp0
= TREE_OPERAND (exp0
, 0);
486 /* "exp0 ^ 1" inverts the sense of the single bit test. */
487 if (TREE_CODE (exp0
) == BIT_XOR_EXPR
488 && integer_onep (TREE_OPERAND (exp0
, 1)))
490 exp0
= TREE_OPERAND (exp0
, 0);
491 clr_label
= if_true_label
;
492 set_label
= if_false_label
;
493 setclr_prob
= inv (prob
);
497 clr_label
= if_false_label
;
498 set_label
= if_true_label
;
501 if (TREE_CODE (exp0
) == RSHIFT_EXPR
)
503 tree arg
= TREE_OPERAND (exp0
, 0);
504 tree shift
= TREE_OPERAND (exp0
, 1);
505 tree argtype
= TREE_TYPE (arg
);
506 if (TREE_CODE (shift
) == INTEGER_CST
507 && compare_tree_int (shift
, 0) >= 0
508 && compare_tree_int (shift
, HOST_BITS_PER_WIDE_INT
) < 0
509 && prefer_and_bit_test (TYPE_MODE (argtype
),
510 TREE_INT_CST_LOW (shift
)))
512 unsigned HOST_WIDE_INT mask
513 = (unsigned HOST_WIDE_INT
) 1 << TREE_INT_CST_LOW (shift
);
514 do_jump (build2 (BIT_AND_EXPR
, argtype
, arg
,
515 build_int_cstu (argtype
, mask
)),
516 clr_label
, set_label
, setclr_prob
);
522 /* If we are AND'ing with a small constant, do this comparison in the
523 smallest type that fits. If the machine doesn't have comparisons
524 that small, it will be converted back to the wider comparison.
525 This helps if we are testing the sign bit of a narrower object.
526 combine can't do this for us because it can't know whether a
527 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
529 if (! SLOW_BYTE_ACCESS
530 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
531 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
532 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
533 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
534 && (type
= lang_hooks
.types
.type_for_mode (mode
, 1)) != 0
535 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
536 && have_insn_for (COMPARE
, TYPE_MODE (type
)))
538 do_jump (fold_convert (type
, exp
), if_false_label
, if_true_label
,
543 if (TYPE_PRECISION (TREE_TYPE (exp
)) > 1
544 || TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
547 /* Boolean comparisons can be compiled as TRUTH_AND_EXPR. */
550 /* High branch cost, expand as the bitwise AND of the conditions.
551 Do the same if the RHS has side effects, because we're effectively
552 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR. */
553 if (BRANCH_COST (optimize_insn_for_speed_p (),
555 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
557 code
= TRUTH_ANDIF_EXPR
;
562 /* High branch cost, expand as the bitwise OR of the conditions.
563 Do the same if the RHS has side effects, because we're effectively
564 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR. */
565 if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
566 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
568 code
= TRUTH_ORIF_EXPR
;
571 /* Fall through and generate the normal code. */
574 temp
= expand_normal (exp
);
575 do_pending_stack_adjust ();
576 /* The RTL optimizers prefer comparisons against pseudos. */
577 if (GET_CODE (temp
) == SUBREG
)
579 /* Compare promoted variables in their promoted mode. */
580 if (SUBREG_PROMOTED_VAR_P (temp
)
581 && REG_P (XEXP (temp
, 0)))
582 temp
= XEXP (temp
, 0);
584 temp
= copy_to_reg (temp
);
586 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
587 NE
, TYPE_UNSIGNED (TREE_TYPE (exp
)),
588 GET_MODE (temp
), NULL_RTX
,
589 if_false_label
, if_true_label
, prob
);
592 if (drop_through_label
)
594 do_pending_stack_adjust ();
595 emit_label (drop_through_label
);
599 /* Compare OP0 with OP1, word at a time, in mode MODE.
600 UNSIGNEDP says to do unsigned comparison.
601 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
604 do_jump_by_parts_greater_rtx (enum machine_mode mode
, int unsignedp
, rtx op0
,
605 rtx op1
, rtx if_false_label
, rtx if_true_label
,
608 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
609 rtx drop_through_label
= 0;
610 bool drop_through_if_true
= false, drop_through_if_false
= false;
611 enum rtx_code code
= GT
;
614 if (! if_true_label
|| ! if_false_label
)
615 drop_through_label
= gen_label_rtx ();
618 if_true_label
= drop_through_label
;
619 drop_through_if_true
= true;
621 if (! if_false_label
)
623 if_false_label
= drop_through_label
;
624 drop_through_if_false
= true;
627 /* Deal with the special case 0 > x: only one comparison is necessary and
628 we reverse it to avoid jumping to the drop-through label. */
629 if (op0
== const0_rtx
&& drop_through_if_true
&& !drop_through_if_false
)
632 if_true_label
= if_false_label
;
633 if_false_label
= drop_through_label
;
634 drop_through_if_true
= false;
635 drop_through_if_false
= true;
638 /* Compare a word at a time, high order first. */
639 for (i
= 0; i
< nwords
; i
++)
641 rtx op0_word
, op1_word
;
643 if (WORDS_BIG_ENDIAN
)
645 op0_word
= operand_subword_force (op0
, i
, mode
);
646 op1_word
= operand_subword_force (op1
, i
, mode
);
650 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
651 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
654 /* All but high-order word must be compared as unsigned. */
655 do_compare_rtx_and_jump (op0_word
, op1_word
, code
, (unsignedp
|| i
> 0),
656 word_mode
, NULL_RTX
, NULL_RTX
, if_true_label
,
659 /* Emit only one comparison for 0. Do not emit the last cond jump. */
660 if (op0
== const0_rtx
|| i
== nwords
- 1)
663 /* Consider lower words only if these are equal. */
664 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
665 NULL_RTX
, NULL_RTX
, if_false_label
, inv (prob
));
668 if (!drop_through_if_false
)
669 emit_jump (if_false_label
);
670 if (drop_through_label
)
671 emit_label (drop_through_label
);
674 /* Given a comparison expression EXP for values too wide to be compared
675 with one insn, test the comparison and jump to the appropriate label.
676 The code of EXP is ignored; we always test GT if SWAP is 0,
677 and LT if SWAP is 1. */
680 do_jump_by_parts_greater (tree treeop0
, tree treeop1
, int swap
,
681 rtx if_false_label
, rtx if_true_label
, int prob
)
683 rtx op0
= expand_normal (swap
? treeop1
: treeop0
);
684 rtx op1
= expand_normal (swap
? treeop0
: treeop1
);
685 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (treeop0
));
686 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (treeop0
));
688 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
,
689 if_true_label
, prob
);
692 /* Jump according to whether OP0 is 0. We assume that OP0 has an integer
693 mode, MODE, that is too wide for the available compare insns. Either
694 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
695 to indicate drop through. */
698 do_jump_by_parts_zero_rtx (enum machine_mode mode
, rtx op0
,
699 rtx if_false_label
, rtx if_true_label
, int prob
)
701 int nwords
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
704 rtx drop_through_label
= 0;
706 /* The fastest way of doing this comparison on almost any machine is to
707 "or" all the words and compare the result. If all have to be loaded
708 from memory and this is a very wide item, it's possible this may
709 be slower, but that's highly unlikely. */
711 part
= gen_reg_rtx (word_mode
);
712 emit_move_insn (part
, operand_subword_force (op0
, 0, mode
));
713 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
714 part
= expand_binop (word_mode
, ior_optab
, part
,
715 operand_subword_force (op0
, i
, mode
),
716 part
, 1, OPTAB_WIDEN
);
720 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
721 NULL_RTX
, if_false_label
, if_true_label
, prob
);
725 /* If we couldn't do the "or" simply, do this with a series of compares. */
726 if (! if_false_label
)
727 drop_through_label
= if_false_label
= gen_label_rtx ();
729 for (i
= 0; i
< nwords
; i
++)
730 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
731 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
,
732 if_false_label
, NULL_RTX
, prob
);
735 emit_jump (if_true_label
);
737 if (drop_through_label
)
738 emit_label (drop_through_label
);
741 /* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
742 where MODE is an integer mode too wide to be compared with one insn.
743 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
744 to indicate drop through. */
747 do_jump_by_parts_equality_rtx (enum machine_mode mode
, rtx op0
, rtx op1
,
748 rtx if_false_label
, rtx if_true_label
, int prob
)
750 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
751 rtx drop_through_label
= 0;
754 if (op1
== const0_rtx
)
756 do_jump_by_parts_zero_rtx (mode
, op0
, if_false_label
, if_true_label
,
760 else if (op0
== const0_rtx
)
762 do_jump_by_parts_zero_rtx (mode
, op1
, if_false_label
, if_true_label
,
767 if (! if_false_label
)
768 drop_through_label
= if_false_label
= gen_label_rtx ();
770 for (i
= 0; i
< nwords
; i
++)
771 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
772 operand_subword_force (op1
, i
, mode
),
773 EQ
, 0, word_mode
, NULL_RTX
,
774 if_false_label
, NULL_RTX
, prob
);
777 emit_jump (if_true_label
);
778 if (drop_through_label
)
779 emit_label (drop_through_label
);
782 /* Given an EQ_EXPR expression EXP for values too wide to be compared
783 with one insn, test the comparison and jump to the appropriate label. */
786 do_jump_by_parts_equality (tree treeop0
, tree treeop1
, rtx if_false_label
,
787 rtx if_true_label
, int prob
)
789 rtx op0
= expand_normal (treeop0
);
790 rtx op1
= expand_normal (treeop1
);
791 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (treeop0
));
792 do_jump_by_parts_equality_rtx (mode
, op0
, op1
, if_false_label
,
793 if_true_label
, prob
);
796 /* Split a comparison into two others, the second of which has the other
797 "orderedness". The first is always ORDERED or UNORDERED if MODE
798 does not honor NaNs (which means that it can be skipped in that case;
799 see do_compare_rtx_and_jump).
801 The two conditions are written in *CODE1 and *CODE2. Return true if
802 the conditions must be ANDed, false if they must be ORed. */
805 split_comparison (enum rtx_code code
, enum machine_mode mode
,
806 enum rtx_code
*code1
, enum rtx_code
*code2
)
855 /* Do not turn a trapping comparison into a non-trapping one. */
856 if (HONOR_SNANS (mode
))
874 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
875 The decision as to signed or unsigned comparison must be made by the caller.
877 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
881 do_compare_rtx_and_jump (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
882 enum machine_mode mode
, rtx size
, rtx if_false_label
,
883 rtx if_true_label
, int prob
)
886 rtx dummy_label
= NULL_RTX
;
888 /* Reverse the comparison if that is safe and we want to jump if it is
889 false. Also convert to the reverse comparison if the target can
892 || ! can_compare_p (code
, mode
, ccp_jump
))
893 && (! FLOAT_MODE_P (mode
)
894 || code
== ORDERED
|| code
== UNORDERED
895 || (! HONOR_NANS (mode
) && (code
== LTGT
|| code
== UNEQ
))
896 || (! HONOR_SNANS (mode
) && (code
== EQ
|| code
== NE
))))
899 if (FLOAT_MODE_P (mode
))
900 rcode
= reverse_condition_maybe_unordered (code
);
902 rcode
= reverse_condition (code
);
904 /* Canonicalize to UNORDERED for the libcall. */
905 if (can_compare_p (rcode
, mode
, ccp_jump
)
906 || (code
== ORDERED
&& ! can_compare_p (ORDERED
, mode
, ccp_jump
)))
909 if_true_label
= if_false_label
;
910 if_false_label
= tem
;
916 /* If one operand is constant, make it the second one. Only do this
917 if the other operand is not constant as well. */
919 if (swap_commutative_operands_p (op0
, op1
))
924 code
= swap_condition (code
);
927 do_pending_stack_adjust ();
929 code
= unsignedp
? unsigned_condition (code
) : code
;
930 if (0 != (tem
= simplify_relational_operation (code
, mode
, VOIDmode
,
933 if (CONSTANT_P (tem
))
935 rtx label
= (tem
== const0_rtx
|| tem
== CONST0_RTX (mode
))
936 ? if_false_label
: if_true_label
;
942 code
= GET_CODE (tem
);
943 mode
= GET_MODE (tem
);
946 unsignedp
= (code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
);
950 dummy_label
= if_true_label
= gen_label_rtx ();
952 if (GET_MODE_CLASS (mode
) == MODE_INT
953 && ! can_compare_p (code
, mode
, ccp_jump
))
958 do_jump_by_parts_greater_rtx (mode
, 1, op1
, op0
,
959 if_false_label
, if_true_label
, prob
);
963 do_jump_by_parts_greater_rtx (mode
, 1, op0
, op1
,
964 if_true_label
, if_false_label
,
969 do_jump_by_parts_greater_rtx (mode
, 1, op0
, op1
,
970 if_false_label
, if_true_label
, prob
);
974 do_jump_by_parts_greater_rtx (mode
, 1, op1
, op0
,
975 if_true_label
, if_false_label
,
980 do_jump_by_parts_greater_rtx (mode
, 0, op1
, op0
,
981 if_false_label
, if_true_label
, prob
);
985 do_jump_by_parts_greater_rtx (mode
, 0, op0
, op1
,
986 if_true_label
, if_false_label
,
991 do_jump_by_parts_greater_rtx (mode
, 0, op0
, op1
,
992 if_false_label
, if_true_label
, prob
);
996 do_jump_by_parts_greater_rtx (mode
, 0, op1
, op0
,
997 if_true_label
, if_false_label
,
1002 do_jump_by_parts_equality_rtx (mode
, op0
, op1
, if_false_label
,
1003 if_true_label
, prob
);
1007 do_jump_by_parts_equality_rtx (mode
, op0
, op1
, if_true_label
,
1008 if_false_label
, inv (prob
));
1017 if (SCALAR_FLOAT_MODE_P (mode
)
1018 && ! can_compare_p (code
, mode
, ccp_jump
)
1019 && can_compare_p (swap_condition (code
), mode
, ccp_jump
))
1022 code
= swap_condition (code
);
1027 else if (SCALAR_FLOAT_MODE_P (mode
)
1028 && ! can_compare_p (code
, mode
, ccp_jump
)
1029 /* Never split ORDERED and UNORDERED.
1030 These must be implemented. */
1031 && (code
!= ORDERED
&& code
!= UNORDERED
)
1032 /* Split a floating-point comparison if
1033 we can jump on other conditions... */
1034 && (have_insn_for (COMPARE
, mode
)
1035 /* ... or if there is no libcall for it. */
1036 || code_to_optab (code
) == unknown_optab
))
1038 enum rtx_code first_code
;
1039 bool and_them
= split_comparison (code
, mode
, &first_code
, &code
);
1041 /* If there are no NaNs, the first comparison should always fall
1043 if (!HONOR_NANS (mode
))
1044 gcc_assert (first_code
== (and_them
? ORDERED
: UNORDERED
));
1051 /* If we only jump if true, just bypass the second jump. */
1052 if (! if_false_label
)
1055 dummy_label
= gen_label_rtx ();
1056 dest_label
= dummy_label
;
1059 dest_label
= if_false_label
;
1060 do_compare_rtx_and_jump (op0
, op1
, first_code
, unsignedp
, mode
,
1061 size
, dest_label
, NULL_RTX
, prob
);
1064 do_compare_rtx_and_jump (op0
, op1
, first_code
, unsignedp
, mode
,
1065 size
, NULL_RTX
, if_true_label
, prob
);
1069 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
,
1070 if_true_label
, prob
);
1074 emit_jump (if_false_label
);
1076 emit_label (dummy_label
);
1079 /* Generate code for a comparison expression EXP (including code to compute
1080 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
1081 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
1082 generated code will drop through.
1083 SIGNED_CODE should be the rtx operation for this comparison for
1084 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
1086 We force a stack adjustment unless there are currently
1087 things pushed on the stack that aren't yet used. */
1090 do_compare_and_jump (tree treeop0
, tree treeop1
, enum rtx_code signed_code
,
1091 enum rtx_code unsigned_code
, rtx if_false_label
,
1092 rtx if_true_label
, int prob
)
1096 enum machine_mode mode
;
1100 /* Don't crash if the comparison was erroneous. */
1101 op0
= expand_normal (treeop0
);
1102 if (TREE_CODE (treeop0
) == ERROR_MARK
)
1105 op1
= expand_normal (treeop1
);
1106 if (TREE_CODE (treeop1
) == ERROR_MARK
)
1109 type
= TREE_TYPE (treeop0
);
1110 mode
= TYPE_MODE (type
);
1111 if (TREE_CODE (treeop0
) == INTEGER_CST
1112 && (TREE_CODE (treeop1
) != INTEGER_CST
1113 || (GET_MODE_BITSIZE (mode
)
1114 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1
))))))
1116 /* op0 might have been replaced by promoted constant, in which
1117 case the type of second argument should be used. */
1118 type
= TREE_TYPE (treeop1
);
1119 mode
= TYPE_MODE (type
);
1121 unsignedp
= TYPE_UNSIGNED (type
);
1122 code
= unsignedp
? unsigned_code
: signed_code
;
1124 #ifdef HAVE_canonicalize_funcptr_for_compare
1125 /* If function pointers need to be "canonicalized" before they can
1126 be reliably compared, then canonicalize them.
1127 Only do this if *both* sides of the comparison are function pointers.
1128 If one side isn't, we want a noncanonicalized comparison. See PR
1129 middle-end/17564. */
1130 if (HAVE_canonicalize_funcptr_for_compare
1131 && TREE_CODE (TREE_TYPE (treeop0
)) == POINTER_TYPE
1132 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0
)))
1134 && TREE_CODE (TREE_TYPE (treeop1
)) == POINTER_TYPE
1135 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1
)))
1138 rtx new_op0
= gen_reg_rtx (mode
);
1139 rtx new_op1
= gen_reg_rtx (mode
);
1141 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
1144 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
1149 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
1151 ? expr_size (treeop0
) : NULL_RTX
),
1152 if_false_label
, if_true_label
, prob
);
1155 #include "gt-dojump.h"