1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "fold-const.h"
28 #include "stor-layout.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
41 #include "insn-codes.h"
43 #include "langhooks.h"
47 static bool prefer_and_bit_test (machine_mode
, int);
48 static void do_jump_by_parts_greater (tree
, tree
, int,
49 rtx_code_label
*, rtx_code_label
*, int);
50 static void do_jump_by_parts_equality (tree
, tree
, rtx_code_label
*,
51 rtx_code_label
*, int);
52 static void do_compare_and_jump (tree
, tree
, enum rtx_code
, enum rtx_code
,
53 rtx_code_label
*, rtx_code_label
*, int);
55 /* Invert probability if there is any. -1 stands for unknown. */
60 return prob
== -1 ? -1 : REG_BR_PROB_BASE
- prob
;
63 /* At the start of a function, record that we have no previously-pushed
64 arguments waiting to be popped. */
67 init_pending_stack_adjust (void)
69 pending_stack_adjust
= 0;
72 /* Discard any pending stack adjustment. This avoid relying on the
73 RTL optimizers to remove useless adjustments when we know the
74 stack pointer value is dead. */
76 discard_pending_stack_adjust (void)
78 stack_pointer_delta
-= pending_stack_adjust
;
79 pending_stack_adjust
= 0;
82 /* When exiting from function, if safe, clear out any pending stack adjust
83 so the adjustment won't get done.
85 Note, if the current function calls alloca, then it must have a
86 frame pointer regardless of the value of flag_omit_frame_pointer. */
89 clear_pending_stack_adjust (void)
/* NOTE(review): the head of the guard condition is missing from this copy
   (only the flag_omit_frame_pointer/calls_alloca clause survives).
   Presumably it also tests the optimization level and EXIT_IGNORE_STACK
   before discarding -- confirm against the complete source.  */
92 && (! flag_omit_frame_pointer
|| cfun
->calls_alloca
)
94 discard_pending_stack_adjust ();
97 /* Pop any previously-pushed arguments that have not been popped yet. */
100 do_pending_stack_adjust (void)
102 if (inhibit_defer_pop
== 0)
104 if (pending_stack_adjust
!= 0)
105 adjust_stack (GEN_INT (pending_stack_adjust
));
106 pending_stack_adjust
= 0;
110 /* Remember pending_stack_adjust/stack_pointer_delta.
111 To be used around code that may call do_pending_stack_adjust (),
112 but the generated code could be discarded e.g. using delete_insns_since. */
115 save_pending_stack_adjust (saved_pending_stack_adjust
*save
)
117 save
->x_pending_stack_adjust
= pending_stack_adjust
;
118 save
->x_stack_pointer_delta
= stack_pointer_delta
;
121 /* Restore the saved pending_stack_adjust/stack_pointer_delta. */
124 restore_pending_stack_adjust (saved_pending_stack_adjust
*save
)
126 if (inhibit_defer_pop
== 0)
128 pending_stack_adjust
= save
->x_pending_stack_adjust
;
129 stack_pointer_delta
= save
->x_stack_pointer_delta
;
133 /* Expand conditional expressions. */
135 /* Generate code to evaluate EXP and jump to LABEL if the value is zero. */
138 jumpifnot (tree exp
, rtx_code_label
*label
, int prob
)
140 do_jump (exp
, label
, NULL
, inv (prob
));
144 jumpifnot_1 (enum tree_code code
, tree op0
, tree op1
, rtx_code_label
*label
,
147 do_jump_1 (code
, op0
, op1
, label
, NULL
, inv (prob
));
150 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
153 jumpif (tree exp
, rtx_code_label
*label
, int prob
)
155 do_jump (exp
, NULL
, label
, prob
);
159 jumpif_1 (enum tree_code code
, tree op0
, tree op1
,
160 rtx_code_label
*label
, int prob
)
162 do_jump_1 (code
, op0
, op1
, NULL
, label
, prob
);
165 /* Used internally by prefer_and_bit_test. */
167 static GTY(()) rtx and_reg
;
168 static GTY(()) rtx and_test
;
169 static GTY(()) rtx shift_test
;
/* NOTE(review): this copy is garbled -- the return type, the declaration of
   speed_p, the cache "first call vs. reuse" branch structure and the second
   operand of the outer AND in shift_test appear to be missing.  Confirm
   against the complete source before editing.  */
171 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
172 where X is an arbitrary register of mode MODE. Return true if the former
176 prefer_and_bit_test (machine_mode mode
, int bitnum
)
179 wide_int mask
= wi::set_bit_in_zero (bitnum
, GET_MODE_PRECISION (mode
));
183 /* Set up rtxes for the two variations. Use NULL as a placeholder
184 for the BITNUM-based constants. */
185 and_reg
= gen_rtx_REG (mode
, LAST_VIRTUAL_REGISTER
+ 1);
186 and_test
= gen_rtx_AND (mode
, and_reg
, NULL
);
187 shift_test
= gen_rtx_AND (mode
, gen_rtx_ASHIFTRT (mode
, and_reg
, NULL
),
192 /* Change the mode of the previously-created rtxes. */
193 PUT_MODE (and_reg
, mode
);
194 PUT_MODE (and_test
, mode
);
195 PUT_MODE (shift_test
, mode
);
196 PUT_MODE (XEXP (shift_test
, 0), mode
);
199 /* Fill in the integers. */
200 XEXP (and_test
, 1) = immed_wide_int_const (mask
, mode
);
201 XEXP (XEXP (shift_test
, 0), 1) = GEN_INT (bitnum
);
/* Compare the costs of the two forms in an IF_THEN_ELSE context; prefer
   the AND form when it is no more expensive.  */
203 speed_p
= optimize_insn_for_speed_p ();
204 return (rtx_cost (and_test
, mode
, IF_THEN_ELSE
, 0, speed_p
)
205 <= rtx_cost (shift_test
, mode
, IF_THEN_ELSE
, 0, speed_p
));
/* NOTE(review): this copy is garbled -- the return type, braces, the
   dispatching switch on CODE, most case labels and break statements are
   missing.  The surviving fragments match a per-tree-code dispatch:
   EQ/NE fall back to do_jump_by_parts_equality when the mode has no
   direct compare, the ordering codes fall back to
   do_jump_by_parts_greater, and TRUTH_ANDIF/ORIF split the probability
   across two jumps.  Confirm against the complete source.  */
208 /* Subroutine of do_jump, dealing with exploded comparisons of the type
209 OP0 CODE OP1 . IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
210 PROB is probability of jump to if_true_label, or -1 if unknown. */
213 do_jump_1 (enum tree_code code
, tree op0
, tree op1
,
214 rtx_code_label
*if_false_label
, rtx_code_label
*if_true_label
,
218 rtx_code_label
*drop_through_label
= 0;
224 tree inner_type
= TREE_TYPE (op0
);
226 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
227 != MODE_COMPLEX_FLOAT
);
228 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
229 != MODE_COMPLEX_INT
);
231 if (integer_zerop (op1
))
232 do_jump (op0
, if_true_label
, if_false_label
, inv (prob
));
233 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
234 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
235 do_jump_by_parts_equality (op0
, op1
, if_false_label
, if_true_label
,
238 do_compare_and_jump (op0
, op1
, EQ
, EQ
, if_false_label
, if_true_label
,
245 tree inner_type
= TREE_TYPE (op0
);
247 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
248 != MODE_COMPLEX_FLOAT
);
249 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type
))
250 != MODE_COMPLEX_INT
);
252 if (integer_zerop (op1
))
253 do_jump (op0
, if_false_label
, if_true_label
, prob
);
254 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
255 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
256 do_jump_by_parts_equality (op0
, op1
, if_true_label
, if_false_label
,
259 do_compare_and_jump (op0
, op1
, NE
, NE
, if_false_label
, if_true_label
,
265 mode
= TYPE_MODE (TREE_TYPE (op0
));
266 if (GET_MODE_CLASS (mode
) == MODE_INT
267 && ! can_compare_p (LT
, mode
, ccp_jump
))
268 do_jump_by_parts_greater (op0
, op1
, 1, if_false_label
, if_true_label
,
271 do_compare_and_jump (op0
, op1
, LT
, LTU
, if_false_label
, if_true_label
,
276 mode
= TYPE_MODE (TREE_TYPE (op0
));
277 if (GET_MODE_CLASS (mode
) == MODE_INT
278 && ! can_compare_p (LE
, mode
, ccp_jump
))
279 do_jump_by_parts_greater (op0
, op1
, 0, if_true_label
, if_false_label
,
282 do_compare_and_jump (op0
, op1
, LE
, LEU
, if_false_label
, if_true_label
,
287 mode
= TYPE_MODE (TREE_TYPE (op0
));
288 if (GET_MODE_CLASS (mode
) == MODE_INT
289 && ! can_compare_p (GT
, mode
, ccp_jump
))
290 do_jump_by_parts_greater (op0
, op1
, 0, if_false_label
, if_true_label
,
293 do_compare_and_jump (op0
, op1
, GT
, GTU
, if_false_label
, if_true_label
,
298 mode
= TYPE_MODE (TREE_TYPE (op0
));
299 if (GET_MODE_CLASS (mode
) == MODE_INT
300 && ! can_compare_p (GE
, mode
, ccp_jump
))
301 do_jump_by_parts_greater (op0
, op1
, 1, if_true_label
, if_false_label
,
304 do_compare_and_jump (op0
, op1
, GE
, GEU
, if_false_label
, if_true_label
,
/* Unordered floating-point comparison codes map straight through to
   do_compare_and_jump with the same rtx code for both signednesses.  */
309 do_compare_and_jump (op0
, op1
, ORDERED
, ORDERED
,
310 if_false_label
, if_true_label
, prob
);
314 do_compare_and_jump (op0
, op1
, UNORDERED
, UNORDERED
,
315 if_false_label
, if_true_label
, prob
);
319 do_compare_and_jump (op0
, op1
, UNLT
, UNLT
, if_false_label
, if_true_label
,
324 do_compare_and_jump (op0
, op1
, UNLE
, UNLE
, if_false_label
, if_true_label
,
329 do_compare_and_jump (op0
, op1
, UNGT
, UNGT
, if_false_label
, if_true_label
,
334 do_compare_and_jump (op0
, op1
, UNGE
, UNGE
, if_false_label
, if_true_label
,
339 do_compare_and_jump (op0
, op1
, UNEQ
, UNEQ
, if_false_label
, if_true_label
,
344 do_compare_and_jump (op0
, op1
, LTGT
, LTGT
, if_false_label
, if_true_label
,
348 case TRUTH_ANDIF_EXPR
:
350 /* Spread the probability that the expression is false evenly between
351 the two conditions. So the first condition is false half the total
352 probability of being false. The second condition is false the other
353 half of the total probability of being false, so its jump has a false
354 probability of half the total, relative to the probability we
355 reached it (i.e. the first condition was true). */
360 int false_prob
= inv (prob
);
361 int op0_false_prob
= false_prob
/ 2;
362 int op1_false_prob
= GCOV_COMPUTE_SCALE ((false_prob
/ 2),
363 inv (op0_false_prob
));
364 /* Get the probability that each jump below is true. */
365 op0_prob
= inv (op0_false_prob
);
366 op1_prob
= inv (op1_false_prob
);
/* With no false label, jump past the second test when the first fails.  */
368 if (if_false_label
== NULL
)
370 drop_through_label
= gen_label_rtx ();
371 do_jump (op0
, drop_through_label
, NULL
, op0_prob
);
372 do_jump (op1
, NULL
, if_true_label
, op1_prob
);
376 do_jump (op0
, if_false_label
, NULL
, op0_prob
);
377 do_jump (op1
, if_false_label
, if_true_label
, op1_prob
);
382 case TRUTH_ORIF_EXPR
:
384 /* Spread the probability evenly between the two conditions. So
385 the first condition has half the total probability of being true.
386 The second condition has the other half of the total probability,
387 so its jump has a probability of half the total, relative to
388 the probability we reached it (i.e. the first condition was false). */
394 op1_prob
= GCOV_COMPUTE_SCALE ((prob
/ 2), inv (op0_prob
));
/* With no true label, jump past the second test when the first succeeds.  */
396 if (if_true_label
== NULL
)
398 drop_through_label
= gen_label_rtx ();
399 do_jump (op0
, NULL
, drop_through_label
, op0_prob
);
400 do_jump (op1
, if_false_label
, NULL
, op1_prob
);
404 do_jump (op0
, NULL
, if_true_label
, op0_prob
);
405 do_jump (op1
, if_false_label
, if_true_label
, op1_prob
);
/* If a local drop-through label was created, emit it here so control
   falls out of this function.  */
414 if (drop_through_label
)
416 do_pending_stack_adjust ();
417 emit_label (drop_through_label
);
/* NOTE(review): this copy is garbled -- the return type, braces, the main
   switch on TREE_CODE (exp), many case labels (constants, ADDR_EXPR,
   conversions, COND_EXPR, BIT_AND_EXPR, TRUTH_AND/OR, default) and break
   statements are missing.  Confirm against the complete source.  */
421 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
422 the result is zero, or IF_TRUE_LABEL if the result is one.
423 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
424 meaning fall through in that case.
426 do_jump always does any pending stack adjust except when it does not
427 actually perform a jump. An example where there is no jump
428 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
430 PROB is probability of jump to if_true_label, or -1 if unknown. */
433 do_jump (tree exp
, rtx_code_label
*if_false_label
,
434 rtx_code_label
*if_true_label
, int prob
)
436 enum tree_code code
= TREE_CODE (exp
);
441 rtx_code_label
*drop_through_label
= NULL
;
/* Constant EXP: jump unconditionally to whichever label matches its
   truth value (a null label means fall through).  */
450 rtx_code_label
*lab
= integer_zerop (exp
) ? if_false_label
458 /* This is not true with #pragma weak */
460 /* The address of something can never be zero. */
462 emit_jump (if_true_label
);
467 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
468 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
469 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
470 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
473 /* If we are narrowing the operand, we have to do the compare in the
475 if ((TYPE_PRECISION (TREE_TYPE (exp
))
476 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
478 case NON_LVALUE_EXPR
:
483 /* These cannot change zero->nonzero or vice versa. */
484 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
, prob
);
/* Logical negation: swap the two labels and invert the probability.  */
488 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
,
/* COND_EXPR: test the condition, then jump on the selected arm.  Missing
   labels are replaced with a shared local drop-through label.  */
494 rtx_code_label
*label1
= gen_label_rtx ();
495 if (!if_true_label
|| !if_false_label
)
497 drop_through_label
= gen_label_rtx ();
499 if_true_label
= drop_through_label
;
501 if_false_label
= drop_through_label
;
504 do_pending_stack_adjust ();
505 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL
, -1);
506 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
, prob
);
508 do_jump (TREE_OPERAND (exp
, 2), if_false_label
, if_true_label
, prob
);
513 /* Lowered by gimplify.c. */
517 /* Nonzero iff operands of minus differ. */
535 case TRUTH_ANDIF_EXPR
:
536 case TRUTH_ORIF_EXPR
:
538 do_jump_1 (code
, TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
539 if_false_label
, if_true_label
, prob
);
543 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
544 See if the former is preferred for jump tests and restore it
546 if (integer_onep (TREE_OPERAND (exp
, 1)))
548 tree exp0
= TREE_OPERAND (exp
, 0);
549 rtx_code_label
*set_label
, *clr_label
;
550 int setclr_prob
= prob
;
552 /* Strip narrowing integral type conversions. */
553 while (CONVERT_EXPR_P (exp0
)
554 && TREE_OPERAND (exp0
, 0) != error_mark_node
555 && TYPE_PRECISION (TREE_TYPE (exp0
))
556 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0
, 0))))
557 exp0
= TREE_OPERAND (exp0
, 0);
559 /* "exp0 ^ 1" inverts the sense of the single bit test. */
560 if (TREE_CODE (exp0
) == BIT_XOR_EXPR
561 && integer_onep (TREE_OPERAND (exp0
, 1)))
563 exp0
= TREE_OPERAND (exp0
, 0);
564 clr_label
= if_true_label
;
565 set_label
= if_false_label
;
566 setclr_prob
= inv (prob
);
570 clr_label
= if_false_label
;
571 set_label
= if_true_label
;
574 if (TREE_CODE (exp0
) == RSHIFT_EXPR
)
576 tree arg
= TREE_OPERAND (exp0
, 0);
577 tree shift
= TREE_OPERAND (exp0
, 1);
578 tree argtype
= TREE_TYPE (arg
);
579 if (TREE_CODE (shift
) == INTEGER_CST
580 && compare_tree_int (shift
, 0) >= 0
581 && compare_tree_int (shift
, HOST_BITS_PER_WIDE_INT
) < 0
582 && prefer_and_bit_test (TYPE_MODE (argtype
),
583 TREE_INT_CST_LOW (shift
)))
585 unsigned HOST_WIDE_INT mask
586 = (unsigned HOST_WIDE_INT
) 1 << TREE_INT_CST_LOW (shift
);
587 do_jump (build2 (BIT_AND_EXPR
, argtype
, arg
,
588 build_int_cstu (argtype
, mask
)),
589 clr_label
, set_label
, setclr_prob
);
595 /* If we are AND'ing with a small constant, do this comparison in the
596 smallest type that fits. If the machine doesn't have comparisons
597 that small, it will be converted back to the wider comparison.
598 This helps if we are testing the sign bit of a narrower object.
599 combine can't do this for us because it can't know whether a
600 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
602 if (! SLOW_BYTE_ACCESS
603 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
604 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
605 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
606 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
607 && (type
= lang_hooks
.types
.type_for_mode (mode
, 1)) != 0
608 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
609 && have_insn_for (COMPARE
, TYPE_MODE (type
)))
611 do_jump (fold_convert (type
, exp
), if_false_label
, if_true_label
,
616 if (TYPE_PRECISION (TREE_TYPE (exp
)) > 1
617 || TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
620 /* Boolean comparisons can be compiled as TRUTH_AND_EXPR. */
623 /* High branch cost, expand as the bitwise AND of the conditions.
624 Do the same if the RHS has side effects, because we're effectively
625 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR. */
626 if (BRANCH_COST (optimize_insn_for_speed_p (),
628 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
630 code
= TRUTH_ANDIF_EXPR
;
635 /* High branch cost, expand as the bitwise OR of the conditions.
636 Do the same if the RHS has side effects, because we're effectively
637 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR. */
638 if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
639 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
641 code
= TRUTH_ORIF_EXPR
;
644 /* Fall through and generate the normal code. */
/* Default path: expand EXP to RTL and compare the result against zero.  */
647 temp
= expand_normal (exp
);
648 do_pending_stack_adjust ();
649 /* The RTL optimizers prefer comparisons against pseudos. */
650 if (GET_CODE (temp
) == SUBREG
)
652 /* Compare promoted variables in their promoted mode. */
653 if (SUBREG_PROMOTED_VAR_P (temp
)
654 && REG_P (XEXP (temp
, 0)))
655 temp
= XEXP (temp
, 0);
657 temp
= copy_to_reg (temp
);
659 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
660 NE
, TYPE_UNSIGNED (TREE_TYPE (exp
)),
661 GET_MODE (temp
), NULL_RTX
,
662 if_false_label
, if_true_label
, prob
);
665 if (drop_through_label
)
667 do_pending_stack_adjust ();
668 emit_label (drop_through_label
);
/* NOTE(review): this copy is garbled -- the return type, PROB parameter
   line, loop-variable declaration, braces and parts of the label-defaulting
   logic are missing.  Confirm against the complete source.  */
672 /* Compare OP0 with OP1, word at a time, in mode MODE.
673 UNSIGNEDP says to do unsigned comparison.
674 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
677 do_jump_by_parts_greater_rtx (machine_mode mode
, int unsignedp
, rtx op0
,
678 rtx op1
, rtx_code_label
*if_false_label
,
679 rtx_code_label
*if_true_label
,
682 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
683 rtx_code_label
*drop_through_label
= 0;
684 bool drop_through_if_true
= false, drop_through_if_false
= false;
685 enum rtx_code code
= GT
;
/* Missing labels are replaced by a shared drop-through label; record which
   direction drops through so the final jump can be elided.  */
688 if (! if_true_label
|| ! if_false_label
)
689 drop_through_label
= gen_label_rtx ();
692 if_true_label
= drop_through_label
;
693 drop_through_if_true
= true;
695 if (! if_false_label
)
697 if_false_label
= drop_through_label
;
698 drop_through_if_false
= true;
701 /* Deal with the special case 0 > x: only one comparison is necessary and
702 we reverse it to avoid jumping to the drop-through label. */
703 if (op0
== const0_rtx
&& drop_through_if_true
&& !drop_through_if_false
)
706 if_true_label
= if_false_label
;
707 if_false_label
= drop_through_label
;
708 drop_through_if_true
= false;
709 drop_through_if_false
= true;
712 /* Compare a word at a time, high order first. */
713 for (i
= 0; i
< nwords
; i
++)
715 rtx op0_word
, op1_word
;
717 if (WORDS_BIG_ENDIAN
)
719 op0_word
= operand_subword_force (op0
, i
, mode
);
720 op1_word
= operand_subword_force (op1
, i
, mode
);
724 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
725 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
728 /* All but high-order word must be compared as unsigned. */
729 do_compare_rtx_and_jump (op0_word
, op1_word
, code
, (unsignedp
|| i
> 0),
730 word_mode
, NULL_RTX
, NULL
, if_true_label
,
733 /* Emit only one comparison for 0. Do not emit the last cond jump. */
734 if (op0
== const0_rtx
|| i
== nwords
- 1)
737 /* Consider lower words only if these are equal. */
738 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
739 NULL_RTX
, NULL
, if_false_label
, inv (prob
));
/* All words compared equal: the GT test is false.  */
742 if (!drop_through_if_false
)
743 emit_jump (if_false_label
);
744 if (drop_through_label
)
745 emit_label (drop_through_label
);
748 /* Given a comparison expression EXP for values too wide to be compared
749 with one insn, test the comparison and jump to the appropriate label.
750 The code of EXP is ignored; we always test GT if SWAP is 0,
751 and LT if SWAP is 1. */
754 do_jump_by_parts_greater (tree treeop0
, tree treeop1
, int swap
,
755 rtx_code_label
*if_false_label
,
756 rtx_code_label
*if_true_label
, int prob
)
758 rtx op0
= expand_normal (swap
? treeop1
: treeop0
);
759 rtx op1
= expand_normal (swap
? treeop0
: treeop1
);
760 machine_mode mode
= TYPE_MODE (TREE_TYPE (treeop0
));
761 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (treeop0
));
763 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
,
764 if_true_label
, prob
);
/* NOTE(review): this copy is garbled -- the return type, local declarations
   of part/i, braces, and the branch testing whether the IOR reduction
   succeeded (with its early return) are missing.  Also note the doubled
   "Either Either" in the surviving comment.  Confirm against the complete
   source.  */
767 /* Jump according to whether OP0 is 0. We assume that OP0 has an integer
768 mode, MODE, that is too wide for the available compare insns. Either
769 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL
770 to indicate drop through. */
773 do_jump_by_parts_zero_rtx (machine_mode mode
, rtx op0
,
774 rtx_code_label
*if_false_label
,
775 rtx_code_label
*if_true_label
, int prob
)
777 int nwords
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
780 rtx_code_label
*drop_through_label
= NULL
;
782 /* The fastest way of doing this comparison on almost any machine is to
783 "or" all the words and compare the result. If all have to be loaded
784 from memory and this is a very wide item, it's possible this may
785 be slower, but that's highly unlikely. */
787 part
= gen_reg_rtx (word_mode
);
788 emit_move_insn (part
, operand_subword_force (op0
, 0, mode
));
789 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
790 part
= expand_binop (word_mode
, ior_optab
, part
,
791 operand_subword_force (op0
, i
, mode
),
792 part
, 1, OPTAB_WIDEN
);
/* When the IOR reduction succeeded, a single compare of PART against
   zero decides the whole test.  */
796 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
797 NULL_RTX
, if_false_label
, if_true_label
, prob
);
801 /* If we couldn't do the "or" simply, do this with a series of compares. */
802 if (! if_false_label
)
803 if_false_label
= drop_through_label
= gen_label_rtx ();
805 for (i
= 0; i
< nwords
; i
++)
806 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
807 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
,
808 if_false_label
, NULL
, prob
);
/* All words were zero: the whole value is zero.  */
811 emit_jump (if_true_label
);
813 if (drop_through_label
)
814 emit_label (drop_through_label
);
/* NOTE(review): this copy is garbled -- the return type, loop-variable
   declaration, braces, the PROB continuation arguments of the
   do_jump_by_parts_zero_rtx calls and their early returns are missing.
   Confirm against the complete source.  */
817 /* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
818 where MODE is an integer mode too wide to be compared with one insn.
819 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
820 to indicate drop through. */
823 do_jump_by_parts_equality_rtx (machine_mode mode
, rtx op0
, rtx op1
,
824 rtx_code_label
*if_false_label
,
825 rtx_code_label
*if_true_label
, int prob
)
827 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
828 rtx_code_label
*drop_through_label
= NULL
;
/* An all-zero operand reduces equality to a zero test on the other one.  */
831 if (op1
== const0_rtx
)
833 do_jump_by_parts_zero_rtx (mode
, op0
, if_false_label
, if_true_label
,
837 else if (op0
== const0_rtx
)
839 do_jump_by_parts_zero_rtx (mode
, op1
, if_false_label
, if_true_label
,
844 if (! if_false_label
)
845 drop_through_label
= if_false_label
= gen_label_rtx ();
/* Compare word by word; any mismatch decides inequality.  */
847 for (i
= 0; i
< nwords
; i
++)
848 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
849 operand_subword_force (op1
, i
, mode
),
850 EQ
, 0, word_mode
, NULL_RTX
,
851 if_false_label
, NULL
, prob
)
854 emit_jump (if_true_label
);
855 if (drop_through_label
)
856 emit_label (drop_through_label
);
859 /* Given an EQ_EXPR expression EXP for values too wide to be compared
860 with one insn, test the comparison and jump to the appropriate label. */
863 do_jump_by_parts_equality (tree treeop0
, tree treeop1
,
864 rtx_code_label
*if_false_label
,
865 rtx_code_label
*if_true_label
, int prob
)
867 rtx op0
= expand_normal (treeop0
);
868 rtx op1
= expand_normal (treeop1
);
869 machine_mode mode
= TYPE_MODE (TREE_TYPE (treeop0
));
870 do_jump_by_parts_equality_rtx (mode
, op0
, op1
, if_false_label
,
871 if_true_label
, prob
);
/* NOTE(review): only the header comment, signature and two interior
   fragments of this function survive in this copy; the body (which
   presumably dispatches on CODE to choose the *CODE1/*CODE2 pair) is
   missing.  Confirm against the complete source before editing.  */
874 /* Split a comparison into two others, the second of which has the other
875 "orderedness". The first is always ORDERED or UNORDERED if MODE
876 does not honor NaNs (which means that it can be skipped in that case;
877 see do_compare_rtx_and_jump).
879 The two conditions are written in *CODE1 and *CODE2. Return true if
880 the conditions must be ANDed, false if they must be ORed. */
883 split_comparison (enum rtx_code code
, machine_mode mode
,
884 enum rtx_code
*code1
, enum rtx_code
*code2
)
933 /* Do not turn a trapping comparison into a non-trapping one. */
934 if (HONOR_SNANS (mode
))
/* NOTE(review): this copy is garbled -- the return type, local declarations
   (rcode, tem), braces, the head of the label-reversal condition, the
   switch over CODE in the by-parts fallback (GT/GE/LT/LE/EQ/NE case labels)
   and several trailing argument lines are missing.  Confirm against the
   complete source.  */
952 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
953 The decision as to signed or unsigned comparison must be made by the caller.
955 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
959 do_compare_rtx_and_jump (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
960 machine_mode mode
, rtx size
,
961 rtx_code_label
*if_false_label
,
962 rtx_code_label
*if_true_label
, int prob
)
965 rtx_code_label
*dummy_label
= NULL
;
967 /* Reverse the comparison if that is safe and we want to jump if it is
968 false. Also convert to the reverse comparison if the target can
971 || ! can_compare_p (code
, mode
, ccp_jump
))
972 && (! FLOAT_MODE_P (mode
)
973 || code
== ORDERED
|| code
== UNORDERED
974 || (! HONOR_NANS (mode
) && (code
== LTGT
|| code
== UNEQ
))
975 || (! HONOR_SNANS (mode
) && (code
== EQ
|| code
== NE
))))
978 if (FLOAT_MODE_P (mode
))
979 rcode
= reverse_condition_maybe_unordered (code
);
981 rcode
= reverse_condition (code
);
983 /* Canonicalize to UNORDERED for the libcall. */
984 if (can_compare_p (rcode
, mode
, ccp_jump
)
985 || (code
== ORDERED
&& ! can_compare_p (ORDERED
, mode
, ccp_jump
)))
/* When the reversal is usable, swap the labels so the reversed code
   jumps to the opposite target.  */
987 std::swap (if_true_label
, if_false_label
);
993 /* If one operand is constant, make it the second one. Only do this
994 if the other operand is not constant as well. */
996 if (swap_commutative_operands_p (op0
, op1
))
998 std::swap (op0
, op1
);
999 code
= swap_condition (code
);
1002 do_pending_stack_adjust ();
1004 code
= unsignedp
? unsigned_condition (code
) : code
;
/* Try to fold the whole comparison; a constant result means the branch
   direction is known at compile time.  */
1005 if (0 != (tem
= simplify_relational_operation (code
, mode
, VOIDmode
,
1008 if (CONSTANT_P (tem
))
1010 rtx_code_label
*label
= (tem
== const0_rtx
1011 || tem
== CONST0_RTX (mode
))
1012 ? if_false_label
: if_true_label
;
/* Otherwise adopt the simplified comparison in place of the original.  */
1018 code
= GET_CODE (tem
);
1019 mode
= GET_MODE (tem
);
1020 op0
= XEXP (tem
, 0);
1021 op1
= XEXP (tem
, 1);
1022 unsignedp
= (code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
);
1025 if (! if_true_label
)
1026 dummy_label
= if_true_label
= gen_label_rtx ();
/* Integer modes with no direct compare insn fall back to the word-by-word
   helpers; the operand order and label order encode the comparison code.  */
1028 if (GET_MODE_CLASS (mode
) == MODE_INT
1029 && ! can_compare_p (code
, mode
, ccp_jump
))
1034 do_jump_by_parts_greater_rtx (mode
, 1, op1
, op0
,
1035 if_false_label
, if_true_label
, prob
);
1039 do_jump_by_parts_greater_rtx (mode
, 1, op0
, op1
,
1040 if_true_label
, if_false_label
,
1045 do_jump_by_parts_greater_rtx (mode
, 1, op0
, op1
,
1046 if_false_label
, if_true_label
, prob
);
1050 do_jump_by_parts_greater_rtx (mode
, 1, op1
, op0
,
1051 if_true_label
, if_false_label
,
1056 do_jump_by_parts_greater_rtx (mode
, 0, op1
, op0
,
1057 if_false_label
, if_true_label
, prob
);
1061 do_jump_by_parts_greater_rtx (mode
, 0, op0
, op1
,
1062 if_true_label
, if_false_label
,
1067 do_jump_by_parts_greater_rtx (mode
, 0, op0
, op1
,
1068 if_false_label
, if_true_label
, prob
);
1072 do_jump_by_parts_greater_rtx (mode
, 0, op1
, op0
,
1073 if_true_label
, if_false_label
,
1078 do_jump_by_parts_equality_rtx (mode
, op0
, op1
, if_false_label
,
1079 if_true_label
, prob
);
1083 do_jump_by_parts_equality_rtx (mode
, op0
, op1
, if_true_label
,
1084 if_false_label
, inv (prob
));
/* Scalar float with no direct compare: first try the swapped condition,
   then try splitting into an ordered/unordered pair of comparisons.  */
1093 if (SCALAR_FLOAT_MODE_P (mode
)
1094 && ! can_compare_p (code
, mode
, ccp_jump
)
1095 && can_compare_p (swap_condition (code
), mode
, ccp_jump
))
1097 code
= swap_condition (code
);
1098 std::swap (op0
, op1
);
1100 else if (SCALAR_FLOAT_MODE_P (mode
)
1101 && ! can_compare_p (code
, mode
, ccp_jump
)
1102 /* Never split ORDERED and UNORDERED.
1103 These must be implemented. */
1104 && (code
!= ORDERED
&& code
!= UNORDERED
)
1105 /* Split a floating-point comparison if
1106 we can jump on other conditions... */
1107 && (have_insn_for (COMPARE
, mode
)
1108 /* ... or if there is no libcall for it. */
1109 || code_to_optab (code
) == unknown_optab
))
1111 enum rtx_code first_code
;
1112 bool and_them
= split_comparison (code
, mode
, &first_code
, &code
);
1114 /* If there are no NaNs, the first comparison should always fall
1116 if (!HONOR_NANS (mode
))
1117 gcc_assert (first_code
== (and_them
? ORDERED
: UNORDERED
));
/* Give the ordered/unordered pre-test a strongly biased probability,
   since NaN operands are expected to be rare.  */
1121 int first_prob
= prob
;
1122 if (first_code
== UNORDERED
)
1123 first_prob
= REG_BR_PROB_BASE
/ 100;
1124 else if (first_code
== ORDERED
)
1125 first_prob
= REG_BR_PROB_BASE
- REG_BR_PROB_BASE
/ 100;
1128 rtx_code_label
*dest_label
;
1129 /* If we only jump if true, just bypass the second jump. */
1130 if (! if_false_label
)
1133 dummy_label
= gen_label_rtx ();
1134 dest_label
= dummy_label
;
1137 dest_label
= if_false_label
;
1138 do_compare_rtx_and_jump (op0
, op1
, first_code
, unsignedp
, mode
,
1139 size
, dest_label
, NULL
, first_prob
);
1142 do_compare_rtx_and_jump (op0
, op1
, first_code
, unsignedp
, mode
,
1143 size
, NULL
, if_true_label
, first_prob
);
/* Directly supported comparison: emit the compare-and-branch insns.  */
1147 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
,
1148 if_true_label
, prob
);
1152 emit_jump (if_false_label
);
1154 emit_label (dummy_label
);
/* NOTE(review): this copy is garbled -- the return type, local declarations
   (op0, op1, type, mode, unsignedp, code), braces, the early returns after
   the ERROR_MARK checks, the FUNCTION_TYPE halves of the pointer-type
   condition, the assignments adopting the canonicalized operands, and part
   of the final call's SIZE argument are missing.  Confirm against the
   complete source.  */
1157 /* Generate code for a comparison expression EXP (including code to compute
1158 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
1159 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
1160 generated code will drop through.
1161 SIGNED_CODE should be the rtx operation for this comparison for
1162 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
1164 We force a stack adjustment unless there are currently
1165 things pushed on the stack that aren't yet used. */
1168 do_compare_and_jump (tree treeop0
, tree treeop1
, enum rtx_code signed_code
,
1169 enum rtx_code unsigned_code
,
1170 rtx_code_label
*if_false_label
,
1171 rtx_code_label
*if_true_label
, int prob
)
1179 /* Don't crash if the comparison was erroneous. */
1180 op0
= expand_normal (treeop0
)
1181 if (TREE_CODE (treeop0
) == ERROR_MARK
)
1184 op1
= expand_normal (treeop1
);
1185 if (TREE_CODE (treeop1
) == ERROR_MARK
)
1188 type
= TREE_TYPE (treeop0
);
1189 mode
= TYPE_MODE (type
);
1190 if (TREE_CODE (treeop0
) == INTEGER_CST
1191 && (TREE_CODE (treeop1
) != INTEGER_CST
1192 || (GET_MODE_BITSIZE (mode
)
1193 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1
))))))
1195 /* op0 might have been replaced by promoted constant, in which
1196 case the type of second argument should be used. */
1197 type
= TREE_TYPE (treeop1
);
1198 mode
= TYPE_MODE (type
);
/* Pick the signed or unsigned rtx comparison code per the tree type.  */
1200 unsignedp
= TYPE_UNSIGNED (type
);
1201 code
= unsignedp
? unsigned_code
: signed_code
;
1203 /* If function pointers need to be "canonicalized" before they can
1204 be reliably compared, then canonicalize them.
1205 Only do this if *both* sides of the comparison are function pointers.
1206 If one side isn't, we want a noncanonicalized comparison. See PR
1207 middle-end/17564. */
1208 if (targetm
.have_canonicalize_funcptr_for_compare ()
1209 && TREE_CODE (TREE_TYPE (treeop0
)) == POINTER_TYPE
1210 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0
)))
1212 && TREE_CODE (TREE_TYPE (treeop1
)) == POINTER_TYPE
1213 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1
)))
1216 rtx new_op0
= gen_reg_rtx (mode
);
1217 rtx new_op1
= gen_reg_rtx (mode
);
1219 emit_insn (targetm
.gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
1222 emit_insn (targetm
.gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
1226 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
1228 ? expr_size (treeop0
) : NULL_RTX
),
1229 if_false_label
, if_true_label
, prob
);
1232 #include "gt-dojump.h"