/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "rtl.h"
#include "tree.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "langhooks.h"
static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}
/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */

void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}
/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
    discard_pending_stack_adjust ();
}
/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}
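
/* Illustration (not from the original sources): when expanding a
   source-level statement such as

       if (cond) stmt;

   the expander typically calls jumpifnot (cond, else_label) so that
   control falls through into the code for STMT when COND is nonzero
   and branches around it otherwise.  */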
/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;
/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  return (rtx_cost (and_test, IF_THEN_ELSE)
          <= rtx_cost (shift_test, IF_THEN_ELSE));
}
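
/* Illustration (not from the original sources): for MODE == SImode and
   BITNUM == 3, the two rtxes whose costs are compared are roughly

       (and:SI (reg:SI N) (const_int 8))                        <- and_test
       (and:SI (ashiftrt:SI (reg:SI N) (const_int 3))
               (const_int 1))                                   <- shift_test

   so the function answers "is the mask-and form at least as cheap as the
   shift-and-mask form on this target?".  */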
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;
#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif
    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;
    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }
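
      /* Illustration (not from the original sources): a jump test such as

             if ((x >> 5) & 1) ...

         reaches this point in the (X >> C) & 1 form produced by
         fold_single_bit_test; when prefer_and_bit_test says the masked
         form is cheaper, it is rewritten back into the equivalent

             if (x & 0x20) ...

         before being expanded.  */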
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;
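
      /* Illustration (not from the original sources): on a 32-bit target,
         a test like "x & 0x80" with X an int can be carried out as a
         QImode (8-bit) comparison when cmp_optab has a QImode handler,
         which lets the sign-bit test of the low byte use a narrower and
         usually cheaper compare.  */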
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;
    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }
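
      /* Illustration (not from the original sources): for "a ? b : c" used
         as a condition, the code above branches to LABEL1 when A is false,
         falls through into the jump test for B, and places the test for C
         after LABEL1, so exactly one of the two operand tests runs.  */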
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }
    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;
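
      /* Note (not from the original sources): each relational case passes
         both a signed and an unsigned rtx code (e.g. LT and LTU) to
         do_compare_and_jump, which picks one based on TYPE_UNSIGNED of the
         operand type; "a < b" on unsigned ints therefore becomes an LTU
         branch while the same expression on signed ints uses LT.  */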
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;
      {
        enum rtx_code rcode1;
        enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();

            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
      }
      break;
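
      /* Illustration (not from the original sources): for "a" and "b" of
         type double, UNLT_EXPR (true when the operands are unordered or
         a < b) is emitted as a single UNLT branch when the target supports
         combined unordered compares; otherwise it is decomposed into the
         pair of jumps for "UNORDERED (a, b) || a < b" using the shared
         true label set up above.  */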
    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;
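
      /* Note (not from the original sources): with cheap branches,
         TRUTH_AND_EXPR and TRUTH_OR_EXPR are expanded as two separate jump
         tests, so the second operand's test is skipped when the first one
         already decides the result; with BRANCH_COST >= 4 or a
         side-effecting RHS, the "goto normal" path instead evaluates the
         whole expression as a bitwise value and branches on it once.  */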
      /* Special case:
            __builtin_expect (<test>, 0)  and
            __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      do_pending_stack_adjust ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else
        {
          gcc_assert (GET_MODE (temp) != VOIDmode);

          /* The RTL optimizers prefer comparisons against pseudos.  */
          if (GET_CODE (temp) == SUBREG)
            {
              /* Compare promoted variables in their promoted mode.  */
              if (SUBREG_PROMOTED_VAR_P (temp)
                  && REG_P (XEXP (temp, 0)))
                temp = XEXP (temp, 0);
              else
                temp = copy_to_reg (temp);
            }

          do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                   NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                                   GET_MODE (temp), NULL_RTX,
                                   if_false_label, if_true_label);
        }
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
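
/* Illustration (not from the original sources): comparing two DImode
   values on a 32-bit little-endian target gives nwords == 2.  The first
   iteration looks at the high words: a GT branch goes straight to the
   true label, and an NE branch (high words differ but are not greater)
   goes to the false label.  Only when the high words are equal does the
   second iteration compare the low words, as unsigned quantities.  */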
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
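
/* Illustration (not from the original sources): to test whether a
   four-word value (e.g. TImode on a 32-bit host) is zero, the loop above
   builds roughly

       part = w0; part |= w1; part |= w2; part |= w3;

   and then emits a single word-sized compare of PART against zero,
   instead of four separate compare-and-branch sequences.  */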
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set CC0
   according to the result.  MODE is the machine mode of the comparison,
   not of the result.  The decision as to signed or unsigned comparison
   must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      if (CONSTANT_P (tem))
        return tem;

      if (COMPARISON_P (tem))
        {
          code = GET_CODE (tem);
          op0 = XEXP (tem, 0);
          op1 = XEXP (tem, 1);
          mode = GET_MODE (op0);
          unsignedp = (code == GTU || code == LTU
                       || code == GEU || code == LEU);
        }
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#ifdef HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
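
/* Note (not from the original sources): the value returned by
   compare_from_rtx is a comparison rtx suitable for use as a branch
   condition.  On cc0 targets it refers to the condition-code register
   just set (cc0 compared against 0); elsewhere it keeps the original
   operands so a later combined compare-and-branch can be formed.  */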
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     for details.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

#include "gt-dojump.h"