/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
38 static bool prefer_and_bit_test (enum machine_mode
, int);
39 static void do_jump_by_parts_greater (tree
, int, rtx
, rtx
);
40 static void do_jump_by_parts_equality (tree
, rtx
, rtx
);
41 static void do_compare_and_jump (tree
, enum rtx_code
, enum rtx_code
, rtx
,
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
48 init_pending_stack_adjust (void)
50 pending_stack_adjust
= 0;
53 /* When exiting from function, if safe, clear out any pending stack adjust
54 so the adjustment won't get done.
56 Note, if the current function calls alloca, then it must have a
57 frame pointer regardless of the value of flag_omit_frame_pointer. */
60 clear_pending_stack_adjust (void)
63 && (! flag_omit_frame_pointer
|| current_function_calls_alloca
)
65 && ! (DECL_INLINE (current_function_decl
) && ! flag_no_inline
)
66 && ! flag_inline_functions
)
68 stack_pointer_delta
-= pending_stack_adjust
,
69 pending_stack_adjust
= 0;
73 /* Pop any previously-pushed arguments that have not been popped yet. */
76 do_pending_stack_adjust (void)
78 if (inhibit_defer_pop
== 0)
80 if (pending_stack_adjust
!= 0)
81 adjust_stack (GEN_INT (pending_stack_adjust
));
82 pending_stack_adjust
= 0;
86 /* Expand conditional expressions. */
88 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
89 LABEL is an rtx of code CODE_LABEL, in this function and all the
93 jumpifnot (tree exp
, rtx label
)
95 do_jump (exp
, label
, NULL_RTX
);
98 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
101 jumpif (tree exp
, rtx label
)
103 do_jump (exp
, NULL_RTX
, label
);
106 /* Used internally by prefer_and_bit_test. */
108 static GTY(()) rtx and_reg
;
109 static GTY(()) rtx and_test
;
110 static GTY(()) rtx shift_test
;
112 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
113 where X is an arbitrary register of mode MODE. Return true if the former
117 prefer_and_bit_test (enum machine_mode mode
, int bitnum
)
121 /* Set up rtxes for the two variations. Use NULL as a placeholder
122 for the BITNUM-based constants. */
123 and_reg
= gen_rtx_REG (mode
, FIRST_PSEUDO_REGISTER
);
124 and_test
= gen_rtx_AND (mode
, and_reg
, NULL
);
125 shift_test
= gen_rtx_AND (mode
, gen_rtx_ASHIFTRT (mode
, and_reg
, NULL
),
130 /* Change the mode of the previously-created rtxes. */
131 PUT_MODE (and_reg
, mode
);
132 PUT_MODE (and_test
, mode
);
133 PUT_MODE (shift_test
, mode
);
134 PUT_MODE (XEXP (shift_test
, 0), mode
);
137 /* Fill in the integers. */
138 XEXP (and_test
, 1) = GEN_INT ((unsigned HOST_WIDE_INT
) 1 << bitnum
);
139 XEXP (XEXP (shift_test
, 0), 1) = GEN_INT (bitnum
);
141 return (rtx_cost (and_test
, IF_THEN_ELSE
)
142 <= rtx_cost (shift_test
, IF_THEN_ELSE
));
145 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
146 the result is zero, or IF_TRUE_LABEL if the result is one.
147 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
148 meaning fall through in that case.
150 do_jump always does any pending stack adjust except when it does not
151 actually perform a jump. An example where there is no jump
152 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
154 This function is responsible for optimizing cases such as
155 &&, || and comparison operators in EXP. */
158 do_jump (tree exp
, rtx if_false_label
, rtx if_true_label
)
160 enum tree_code code
= TREE_CODE (exp
);
161 /* Some cases need to create a label to jump to
162 in order to properly fall through.
163 These cases set DROP_THROUGH_LABEL nonzero. */
164 rtx drop_through_label
= 0;
168 enum machine_mode mode
;
178 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
184 /* This is not true with #pragma weak */
186 /* The address of something can never be zero. */
188 emit_jump (if_true_label
);
193 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
194 TREE_OPERAND (exp
, 0)
195 = lang_hooks
.unsave_expr_now (TREE_OPERAND (exp
, 0));
199 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
200 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
201 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
202 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
205 /* If we are narrowing the operand, we have to do the compare in the
207 if ((TYPE_PRECISION (TREE_TYPE (exp
))
208 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
210 case NON_LVALUE_EXPR
:
215 /* These cannot change zero->nonzero or vice versa. */
216 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
220 /* This is never less insns than evaluating the PLUS_EXPR followed by
221 a test and can be longer if the test is eliminated. */
223 /* Reduce to minus. */
224 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
225 TREE_OPERAND (exp
, 0),
226 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
227 TREE_OPERAND (exp
, 1))));
228 /* Process as MINUS. */
232 /* Nonzero iff operands of minus differ. */
233 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
234 TREE_OPERAND (exp
, 0),
235 TREE_OPERAND (exp
, 1)),
236 NE
, NE
, if_false_label
, if_true_label
);
240 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
241 See if the former is preferred for jump tests and restore it
243 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == RSHIFT_EXPR
244 && integer_onep (TREE_OPERAND (exp
, 1)))
246 tree arg
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
247 tree shift
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 1);
248 tree one
= TREE_OPERAND (exp
, 1);
249 tree argtype
= TREE_TYPE (arg
);
250 if (TREE_CODE (shift
) == INTEGER_CST
251 && compare_tree_int (shift
, 0) > 0
252 && compare_tree_int (shift
, HOST_BITS_PER_WIDE_INT
) < 0
253 && prefer_and_bit_test (TYPE_MODE (argtype
),
254 TREE_INT_CST_LOW (shift
)))
256 do_jump (build (BIT_AND_EXPR
, argtype
, arg
,
257 fold (build (LSHIFT_EXPR
, argtype
, one
, shift
))),
258 if_false_label
, if_true_label
);
263 /* If we are AND'ing with a small constant, do this comparison in the
264 smallest type that fits. If the machine doesn't have comparisons
265 that small, it will be converted back to the wider comparison.
266 This helps if we are testing the sign bit of a narrower object.
267 combine can't do this for us because it can't know whether a
268 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
270 if (! SLOW_BYTE_ACCESS
271 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
272 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
273 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
274 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
275 && (type
= lang_hooks
.types
.type_for_mode (mode
, 1)) != 0
276 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
277 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
278 != CODE_FOR_nothing
))
280 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
286 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
289 case TRUTH_ANDIF_EXPR
:
290 if (if_false_label
== 0)
291 if_false_label
= drop_through_label
= gen_label_rtx ();
292 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
293 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
296 case TRUTH_ORIF_EXPR
:
297 if (if_true_label
== 0)
298 if_true_label
= drop_through_label
= gen_label_rtx ();
299 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
300 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
305 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
306 preserve_temp_slots (NULL_RTX
);
310 do_pending_stack_adjust ();
311 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
317 case ARRAY_RANGE_REF
:
319 HOST_WIDE_INT bitsize
, bitpos
;
321 enum machine_mode mode
;
326 /* Get description of this reference. We don't actually care
327 about the underlying object here. */
328 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
329 &unsignedp
, &volatilep
);
331 type
= lang_hooks
.types
.type_for_size (bitsize
, unsignedp
);
332 if (! SLOW_BYTE_ACCESS
333 && type
!= 0 && bitsize
>= 0
334 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
335 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
336 != CODE_FOR_nothing
))
338 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
345 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
346 if (integer_onep (TREE_OPERAND (exp
, 1))
347 && integer_zerop (TREE_OPERAND (exp
, 2)))
348 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
350 else if (integer_zerop (TREE_OPERAND (exp
, 1))
351 && integer_onep (TREE_OPERAND (exp
, 2)))
352 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
356 rtx label1
= gen_label_rtx ();
357 drop_through_label
= gen_label_rtx ();
359 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
361 /* Now the THEN-expression. */
362 do_jump (TREE_OPERAND (exp
, 1),
363 if_false_label
? if_false_label
: drop_through_label
,
364 if_true_label
? if_true_label
: drop_through_label
);
365 /* In case the do_jump just above never jumps. */
366 do_pending_stack_adjust ();
369 /* Now the ELSE-expression. */
370 do_jump (TREE_OPERAND (exp
, 2),
371 if_false_label
? if_false_label
: drop_through_label
,
372 if_true_label
? if_true_label
: drop_through_label
);
378 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
380 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
381 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
383 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
384 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
387 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
388 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
389 fold (build1 (REALPART_EXPR
,
390 TREE_TYPE (inner_type
),
392 fold (build1 (REALPART_EXPR
,
393 TREE_TYPE (inner_type
),
395 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
396 fold (build1 (IMAGPART_EXPR
,
397 TREE_TYPE (inner_type
),
399 fold (build1 (IMAGPART_EXPR
,
400 TREE_TYPE (inner_type
),
402 if_false_label
, if_true_label
);
405 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
406 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
408 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
409 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
410 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
412 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
418 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
420 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
421 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
423 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
424 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
427 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
428 fold (build (NE_EXPR
, TREE_TYPE (exp
),
429 fold (build1 (REALPART_EXPR
,
430 TREE_TYPE (inner_type
),
432 fold (build1 (REALPART_EXPR
,
433 TREE_TYPE (inner_type
),
435 fold (build (NE_EXPR
, TREE_TYPE (exp
),
436 fold (build1 (IMAGPART_EXPR
,
437 TREE_TYPE (inner_type
),
439 fold (build1 (IMAGPART_EXPR
,
440 TREE_TYPE (inner_type
),
442 if_false_label
, if_true_label
);
445 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
446 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
448 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
449 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
450 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
452 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
457 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
458 if (GET_MODE_CLASS (mode
) == MODE_INT
459 && ! can_compare_p (LT
, mode
, ccp_jump
))
460 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
462 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
466 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
467 if (GET_MODE_CLASS (mode
) == MODE_INT
468 && ! can_compare_p (LE
, mode
, ccp_jump
))
469 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
471 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
475 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
476 if (GET_MODE_CLASS (mode
) == MODE_INT
477 && ! can_compare_p (GT
, mode
, ccp_jump
))
478 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
480 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
484 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
485 if (GET_MODE_CLASS (mode
) == MODE_INT
486 && ! can_compare_p (GE
, mode
, ccp_jump
))
487 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
489 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
495 enum rtx_code cmp
, rcmp
;
498 if (code
== UNORDERED_EXPR
)
499 cmp
= UNORDERED
, rcmp
= ORDERED
;
501 cmp
= ORDERED
, rcmp
= UNORDERED
;
502 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
505 if (! can_compare_p (cmp
, mode
, ccp_jump
)
506 && (can_compare_p (rcmp
, mode
, ccp_jump
)
507 /* If the target doesn't provide either UNORDERED or ORDERED
508 comparisons, canonicalize on UNORDERED for the library. */
509 || rcmp
== UNORDERED
))
513 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
515 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
520 enum rtx_code rcode1
;
521 enum tree_code tcode1
, tcode2
;
525 tcode1
= UNORDERED_EXPR
;
530 tcode1
= UNORDERED_EXPR
;
535 tcode1
= UNORDERED_EXPR
;
540 tcode1
= UNORDERED_EXPR
;
545 tcode1
= UNORDERED_EXPR
;
549 /* It is ok for LTGT_EXPR to trap when the result is unordered,
550 so expand to (a < b) || (a > b). */
557 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
558 if (can_compare_p (rcode1
, mode
, ccp_jump
))
559 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
563 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
564 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
567 /* If the target doesn't support combined unordered
568 compares, decompose into two comparisons. */
569 cmp0
= fold (build (tcode1
, TREE_TYPE (exp
), op0
, op1
));
570 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
571 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
572 do_jump (exp
, if_false_label
, if_true_label
);
578 __builtin_expect (<test>, 0) and
579 __builtin_expect (<test>, 1)
581 We need to do this here, so that <test> is not converted to a SCC
582 operation on machines that use condition code registers and COMPARE
583 like the PowerPC, and then the jump is done based on whether the SCC
584 operation produced a 1 or 0. */
586 /* Check for a built-in function. */
588 tree fndecl
= get_callee_fndecl (exp
);
589 tree arglist
= TREE_OPERAND (exp
, 1);
592 && DECL_BUILT_IN (fndecl
)
593 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
594 && arglist
!= NULL_TREE
595 && TREE_CHAIN (arglist
) != NULL_TREE
)
597 rtx seq
= expand_builtin_expect_jump (exp
, if_false_label
,
607 /* Fall through and generate the normal code. */
611 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
613 /* This is not needed any more and causes poor code since it causes
614 comparisons and tests from non-SI objects to have different code
616 /* Copy to register to avoid generating bad insns by cse
617 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
618 if (!cse_not_expected
&& MEM_P (temp
))
619 temp
= copy_to_reg (temp
);
621 do_pending_stack_adjust ();
622 /* Do any postincrements in the expression that was tested. */
625 if (GET_CODE (temp
) == CONST_INT
626 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
627 || GET_CODE (temp
) == LABEL_REF
)
629 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
633 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
634 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
635 /* Note swapping the labels gives us not-equal. */
636 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
637 else if (GET_MODE (temp
) != VOIDmode
)
639 /* The RTL optimizers prefer comparisons against pseudos. */
640 if (GET_CODE (temp
) == SUBREG
)
642 /* Compare promoted variables in their promoted mode. */
643 if (SUBREG_PROMOTED_VAR_P (temp
)
644 && REG_P (XEXP (temp
, 0)))
645 temp
= XEXP (temp
, 0);
647 temp
= copy_to_reg (temp
);
649 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
650 NE
, TYPE_UNSIGNED (TREE_TYPE (exp
)),
651 GET_MODE (temp
), NULL_RTX
,
652 if_false_label
, if_true_label
);
658 if (drop_through_label
)
660 /* If do_jump produces code that might be jumped around,
661 do any stack adjusts from that code, before the place
662 where control merges in. */
663 do_pending_stack_adjust ();
664 emit_label (drop_through_label
);
668 /* Given a comparison expression EXP for values too wide to be compared
669 with one insn, test the comparison and jump to the appropriate label.
670 The code of EXP is ignored; we always test GT if SWAP is 0,
671 and LT if SWAP is 1. */
674 do_jump_by_parts_greater (tree exp
, int swap
, rtx if_false_label
,
677 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
678 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
679 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
680 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
682 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
,
686 /* Compare OP0 with OP1, word at a time, in mode MODE.
687 UNSIGNEDP says to do unsigned comparison.
688 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
691 do_jump_by_parts_greater_rtx (enum machine_mode mode
, int unsignedp
, rtx op0
,
692 rtx op1
, rtx if_false_label
, rtx if_true_label
)
694 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
695 rtx drop_through_label
= 0;
698 if (! if_true_label
|| ! if_false_label
)
699 drop_through_label
= gen_label_rtx ();
701 if_true_label
= drop_through_label
;
702 if (! if_false_label
)
703 if_false_label
= drop_through_label
;
705 /* Compare a word at a time, high order first. */
706 for (i
= 0; i
< nwords
; i
++)
708 rtx op0_word
, op1_word
;
710 if (WORDS_BIG_ENDIAN
)
712 op0_word
= operand_subword_force (op0
, i
, mode
);
713 op1_word
= operand_subword_force (op1
, i
, mode
);
717 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
718 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
721 /* All but high-order word must be compared as unsigned. */
722 do_compare_rtx_and_jump (op0_word
, op1_word
, GT
,
723 (unsignedp
|| i
> 0), word_mode
, NULL_RTX
,
724 NULL_RTX
, if_true_label
);
726 /* Consider lower words only if these are equal. */
727 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
728 NULL_RTX
, NULL_RTX
, if_false_label
);
732 emit_jump (if_false_label
);
733 if (drop_through_label
)
734 emit_label (drop_through_label
);
737 /* Given an EQ_EXPR expression EXP for values too wide to be compared
738 with one insn, test the comparison and jump to the appropriate label. */
741 do_jump_by_parts_equality (tree exp
, rtx if_false_label
, rtx if_true_label
)
743 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
744 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
745 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
746 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
748 rtx drop_through_label
= 0;
750 if (! if_false_label
)
751 drop_through_label
= if_false_label
= gen_label_rtx ();
753 for (i
= 0; i
< nwords
; i
++)
754 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
755 operand_subword_force (op1
, i
, mode
),
756 EQ
, TYPE_UNSIGNED (TREE_TYPE (exp
)),
757 word_mode
, NULL_RTX
, if_false_label
, NULL_RTX
);
760 emit_jump (if_true_label
);
761 if (drop_through_label
)
762 emit_label (drop_through_label
);
765 /* Jump according to whether OP0 is 0.
766 We assume that OP0 has an integer mode that is too wide
767 for the available compare insns. */
770 do_jump_by_parts_equality_rtx (rtx op0
, rtx if_false_label
, rtx if_true_label
)
772 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
775 rtx drop_through_label
= 0;
777 /* The fastest way of doing this comparison on almost any machine is to
778 "or" all the words and compare the result. If all have to be loaded
779 from memory and this is a very wide item, it's possible this may
780 be slower, but that's highly unlikely. */
782 part
= gen_reg_rtx (word_mode
);
783 emit_move_insn (part
, operand_subword_force (op0
, 0, GET_MODE (op0
)));
784 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
785 part
= expand_binop (word_mode
, ior_optab
, part
,
786 operand_subword_force (op0
, i
, GET_MODE (op0
)),
787 part
, 1, OPTAB_WIDEN
);
791 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
792 NULL_RTX
, if_false_label
, if_true_label
);
797 /* If we couldn't do the "or" simply, do this with a series of compares. */
798 if (! if_false_label
)
799 drop_through_label
= if_false_label
= gen_label_rtx ();
801 for (i
= 0; i
< nwords
; i
++)
802 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, GET_MODE (op0
)),
803 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
,
804 if_false_label
, NULL_RTX
);
807 emit_jump (if_true_label
);
809 if (drop_through_label
)
810 emit_label (drop_through_label
);
813 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
814 (including code to compute the values to be compared)
815 and set (CC0) according to the result.
816 The decision as to signed or unsigned comparison must be made by the caller.
818 We force a stack adjustment unless there are currently
819 things pushed on the stack that aren't yet used.
821 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
825 compare_from_rtx (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
826 enum machine_mode mode
, rtx size
)
830 /* If one operand is constant, make it the second one. Only do this
831 if the other operand is not constant as well. */
833 if (swap_commutative_operands_p (op0
, op1
))
838 code
= swap_condition (code
);
843 op0
= force_not_mem (op0
);
844 op1
= force_not_mem (op1
);
847 do_pending_stack_adjust ();
849 code
= unsignedp
? unsigned_condition (code
) : code
;
850 if (0 != (tem
= simplify_relational_operation (code
, mode
, VOIDmode
,
853 if (CONSTANT_P (tem
))
856 code
= GET_CODE (tem
);
857 mode
= GET_MODE (tem
);
860 unsignedp
= (code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
);
863 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
);
866 return gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
868 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
872 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
873 The decision as to signed or unsigned comparison must be made by the caller.
875 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
879 do_compare_rtx_and_jump (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
880 enum machine_mode mode
, rtx size
, rtx if_false_label
,
884 int dummy_true_label
= 0;
886 /* Reverse the comparison if that is safe and we want to jump if it is
888 if (! if_true_label
&& ! FLOAT_MODE_P (mode
))
890 if_true_label
= if_false_label
;
892 code
= reverse_condition (code
);
895 /* If one operand is constant, make it the second one. Only do this
896 if the other operand is not constant as well. */
898 if (swap_commutative_operands_p (op0
, op1
))
903 code
= swap_condition (code
);
908 op0
= force_not_mem (op0
);
909 op1
= force_not_mem (op1
);
912 do_pending_stack_adjust ();
914 code
= unsignedp
? unsigned_condition (code
) : code
;
915 if (0 != (tem
= simplify_relational_operation (code
, mode
, VOIDmode
,
918 if (CONSTANT_P (tem
))
920 rtx label
= (tem
== const0_rtx
|| tem
== CONST0_RTX (mode
))
921 ? if_false_label
: if_true_label
;
927 code
= GET_CODE (tem
);
928 mode
= GET_MODE (tem
);
931 unsignedp
= (code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
);
936 dummy_true_label
= 1;
937 if_true_label
= gen_label_rtx ();
940 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
,
944 emit_jump (if_false_label
);
945 if (dummy_true_label
)
946 emit_label (if_true_label
);
949 /* Generate code for a comparison expression EXP (including code to compute
950 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
951 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
952 generated code will drop through.
953 SIGNED_CODE should be the rtx operation for this comparison for
954 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
956 We force a stack adjustment unless there are currently
957 things pushed on the stack that aren't yet used. */
960 do_compare_and_jump (tree exp
, enum rtx_code signed_code
,
961 enum rtx_code unsigned_code
, rtx if_false_label
,
966 enum machine_mode mode
;
970 /* Don't crash if the comparison was erroneous. */
971 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
972 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
975 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
976 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == ERROR_MARK
)
979 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
980 mode
= TYPE_MODE (type
);
981 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
982 && (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
983 || (GET_MODE_BITSIZE (mode
)
984 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
,
987 /* op0 might have been replaced by promoted constant, in which
988 case the type of second argument should be used. */
989 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
990 mode
= TYPE_MODE (type
);
992 unsignedp
= TYPE_UNSIGNED (type
);
993 code
= unsignedp
? unsigned_code
: signed_code
;
995 #ifdef HAVE_canonicalize_funcptr_for_compare
996 /* If function pointers need to be "canonicalized" before they can
997 be reliably compared, then canonicalize them. */
998 if (HAVE_canonicalize_funcptr_for_compare
999 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
1000 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1003 rtx new_op0
= gen_reg_rtx (mode
);
1005 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
1009 if (HAVE_canonicalize_funcptr_for_compare
1010 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
1011 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
1014 rtx new_op1
= gen_reg_rtx (mode
);
1016 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
1021 /* Do any postincrements in the expression that was tested. */
1024 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
1026 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
1027 if_false_label
, if_true_label
);
1030 #include "gt-dojump.h"