1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "langhooks.h"
38 static bool prefer_and_bit_test (enum machine_mode
, int);
39 static void do_jump_by_parts_greater (tree
, int, rtx
, rtx
);
40 static void do_jump_by_parts_equality (tree
, rtx
, rtx
);
41 static void do_compare_and_jump (tree
, enum rtx_code
, enum rtx_code
, rtx
,
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
48 init_pending_stack_adjust (void)
50 pending_stack_adjust
= 0;
53 /* Discard any pending stack adjustment. This avoid relying on the
54 RTL optimizers to remove useless adjustments when we know the
55 stack pointer value is dead. */
56 void discard_pending_stack_adjust (void)
58 stack_pointer_delta
-= pending_stack_adjust
;
59 pending_stack_adjust
= 0;
62 /* When exiting from function, if safe, clear out any pending stack adjust
63 so the adjustment won't get done.
65 Note, if the current function calls alloca, then it must have a
66 frame pointer regardless of the value of flag_omit_frame_pointer. */
69 clear_pending_stack_adjust (void)
/* NOTE(review): the extraction appears to be missing the opening of the
   condition here (e.g. an `optimize' test and an EXIT_IGNORE_STACK guard
   are conventional in this function) -- confirm against the upstream
   dojump.c before relying on the guard below.  */
72 && (! flag_omit_frame_pointer
|| current_function_calls_alloca
)
74 && ! (DECL_INLINE (current_function_decl
) && ! flag_no_inline
)
75 && ! flag_inline_functions
)
/* When all guards hold, the pending adjustment is provably dead at
   function exit, so drop it.  */
76 discard_pending_stack_adjust ();
79 /* Pop any previously-pushed arguments that have not been popped yet. */
82 do_pending_stack_adjust (void)
84 if (inhibit_defer_pop
== 0)
86 if (pending_stack_adjust
!= 0)
87 adjust_stack (GEN_INT (pending_stack_adjust
));
88 pending_stack_adjust
= 0;
92 /* Expand conditional expressions. */
94 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
95 LABEL is an rtx of code CODE_LABEL, in this function and all the
99 jumpifnot (tree exp
, rtx label
)
101 do_jump (exp
, label
, NULL_RTX
);
104 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
107 jumpif (tree exp
, rtx label
)
109 do_jump (exp
, NULL_RTX
, label
);
112 /* Used internally by prefer_and_bit_test. */
/* Scratch rtxes cached across calls (GC-rooted via GTY) so the cost
   comparison below does not allocate fresh RTL on every query.  */
114 static GTY(()) rtx and_reg
;
115 static GTY(()) rtx and_test
;
116 static GTY(()) rtx shift_test
;
118 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1"
119 where X is an arbitrary register of mode MODE. Return true if the former
123 prefer_and_bit_test (enum machine_mode mode
, int bitnum
)
127 /* Set up rtxes for the two variations. Use NULL as a placeholder
128 for the BITNUM-based constants. */
129 and_reg
= gen_rtx_REG (mode
, FIRST_PSEUDO_REGISTER
);
130 and_test
= gen_rtx_AND (mode
, and_reg
, NULL
);
131 shift_test
= gen_rtx_AND (mode
, gen_rtx_ASHIFTRT (mode
, and_reg
, NULL
),
136 /* Change the mode of the previously-created rtxes. */
137 PUT_MODE (and_reg
, mode
);
138 PUT_MODE (and_test
, mode
);
139 PUT_MODE (shift_test
, mode
);
140 PUT_MODE (XEXP (shift_test
, 0), mode
);
143 /* Fill in the integers. */
144 XEXP (and_test
, 1) = GEN_INT ((unsigned HOST_WIDE_INT
) 1 << bitnum
);
145 XEXP (XEXP (shift_test
, 0), 1) = GEN_INT (bitnum
);
147 return (rtx_cost (and_test
, IF_THEN_ELSE
)
148 <= rtx_cost (shift_test
, IF_THEN_ELSE
));
151 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
152 the result is zero, or IF_TRUE_LABEL if the result is one.
153 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
154 meaning fall through in that case.
156 do_jump always does any pending stack adjust except when it does not
157 actually perform a jump. An example where there is no jump
158 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
160 This function is responsible for optimizing cases such as
161 &&, || and comparison operators in EXP. */
/* NOTE(review): this extraction is missing many physical lines of the
   function (braces, most `case' labels, some local declarations and
   `break's).  Tokens below are preserved verbatim; consult the upstream
   dojump.c for the full switch skeleton.  */
164 do_jump (tree exp
, rtx if_false_label
, rtx if_true_label
)
166 enum tree_code code
= TREE_CODE (exp
);
167 /* Some cases need to create a label to jump to
168 in order to properly fall through.
169 These cases set DROP_THROUGH_LABEL nonzero. */
170 rtx drop_through_label
= 0;
174 enum machine_mode mode
;
/* Constant operand: jump straight to the label selected by the value.
   Presumably the INTEGER_CST case of the switch -- confirm upstream.  */
184 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
190 /* This is not true with #pragma weak */
192 /* The address of something can never be zero. */
194 emit_jump (if_true_label
);
199 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
200 TREE_OPERAND (exp
, 0)
201 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
205 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
206 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
207 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
208 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
211 /* If we are narrowing the operand, we have to do the compare in the
213 if ((TYPE_PRECISION (TREE_TYPE (exp
))
214 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
216 case NON_LVALUE_EXPR
:
222 /* These cannot change zero->nonzero or vice versa. */
223 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
226 case WITH_RECORD_EXPR
:
227 /* Put the object on the placeholder list, recurse through our first
228 operand, and pop the list. */
229 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
231 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
232 placeholder_list
= TREE_CHAIN (placeholder_list
);
236 /* This is never less insns than evaluating the PLUS_EXPR followed by
237 a test and can be longer if the test is eliminated. */
239 /* Reduce to minus. */
240 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
241 TREE_OPERAND (exp
, 0),
242 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
243 TREE_OPERAND (exp
, 1))));
244 /* Process as MINUS. */
248 /* Nonzero iff operands of minus differ. */
249 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
250 TREE_OPERAND (exp
, 0),
251 TREE_OPERAND (exp
, 1)),
252 NE
, NE
, if_false_label
, if_true_label
);
256 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
257 See if the former is preferred for jump tests and restore it
259 if (integer_onep (TREE_OPERAND (exp
, 1)))
261 tree exp0
= TREE_OPERAND (exp
, 0);
262 rtx set_label
, clr_label
;
264 /* Strip narrowing integral type conversions. */
265 while ((TREE_CODE (exp0
) == NOP_EXPR
266 || TREE_CODE (exp0
) == CONVERT_EXPR
267 || TREE_CODE (exp0
) == NON_LVALUE_EXPR
)
268 && TREE_OPERAND (exp0
, 0) != error_mark_node
269 && TYPE_PRECISION (TREE_TYPE (exp0
))
270 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0
, 0))))
271 exp0
= TREE_OPERAND (exp0
, 0);
273 /* "exp0 ^ 1" inverts the sense of the single bit test. */
274 if (TREE_CODE (exp0
) == BIT_XOR_EXPR
275 && integer_onep (TREE_OPERAND (exp0
, 1)))
277 exp0
= TREE_OPERAND (exp0
, 0);
278 clr_label
= if_true_label
;
279 set_label
= if_false_label
;
283 clr_label
= if_false_label
;
284 set_label
= if_true_label
;
287 if (TREE_CODE (exp0
) == RSHIFT_EXPR
)
289 tree arg
= TREE_OPERAND (exp0
, 0);
290 tree shift
= TREE_OPERAND (exp0
, 1);
291 tree argtype
= TREE_TYPE (arg
);
292 if (TREE_CODE (shift
) == INTEGER_CST
293 && compare_tree_int (shift
, 0) >= 0
294 && compare_tree_int (shift
, HOST_BITS_PER_WIDE_INT
) < 0
295 && prefer_and_bit_test (TYPE_MODE (argtype
),
296 TREE_INT_CST_LOW (shift
)))
298 HOST_WIDE_INT mask
= (HOST_WIDE_INT
) 1
299 << TREE_INT_CST_LOW (shift
);
300 tree t
= build_int_2 (mask
, 0);
301 TREE_TYPE (t
) = argtype
;
302 do_jump (build (BIT_AND_EXPR
, argtype
, arg
, t
),
303 clr_label
, set_label
);
/* BIT_AND_EXPR with a small constant: retry the comparison in the
   narrowest integer type that holds the constant, so narrower compare
   insns can be used.  */
309 /* If we are AND'ing with a small constant, do this comparison in the
310 smallest type that fits. If the machine doesn't have comparisons
311 that small, it will be converted back to the wider comparison.
312 This helps if we are testing the sign bit of a narrower object.
313 combine can't do this for us because it can't know whether a
314 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
316 if (! SLOW_BYTE_ACCESS
317 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
318 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
319 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
320 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
321 && (type
= (*lang_hooks
.types
.type_for_mode
) (mode
, 1)) != 0
322 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
323 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
324 != CODE_FOR_nothing
))
326 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
/* Logical negation: swap the two targets and recurse.  */
332 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
335 case TRUTH_ANDIF_EXPR
:
336 if (if_false_label
== 0)
337 if_false_label
= drop_through_label
= gen_label_rtx ();
338 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
339 start_cleanup_deferral ();
340 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
341 end_cleanup_deferral ();
344 case TRUTH_ORIF_EXPR
:
345 if (if_true_label
== 0)
346 if_true_label
= drop_through_label
= gen_label_rtx ();
347 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
348 start_cleanup_deferral ();
349 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
350 end_cleanup_deferral ();
/* Compound expression: evaluate the first operand for its side
   effects, then jump on the second.  */
355 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
356 preserve_temp_slots (NULL_RTX
);
360 do_pending_stack_adjust ();
361 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
367 case ARRAY_RANGE_REF
:
369 HOST_WIDE_INT bitsize
, bitpos
;
371 enum machine_mode mode
;
376 /* Get description of this reference. We don't actually care
377 about the underlying object here. */
378 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
379 &unsignedp
, &volatilep
);
381 type
= (*lang_hooks
.types
.type_for_size
) (bitsize
, unsignedp
);
382 if (! SLOW_BYTE_ACCESS
383 && type
!= 0 && bitsize
>= 0
384 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
385 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
386 != CODE_FOR_nothing
))
388 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
/* COND_EXPR handling.  */
395 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
396 if (integer_onep (TREE_OPERAND (exp
, 1))
397 && integer_zerop (TREE_OPERAND (exp
, 2)))
398 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
400 else if (integer_zerop (TREE_OPERAND (exp
, 1))
401 && integer_onep (TREE_OPERAND (exp
, 2)))
402 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
406 rtx label1
= gen_label_rtx ();
407 drop_through_label
= gen_label_rtx ();
409 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
411 start_cleanup_deferral ();
412 /* Now the THEN-expression. */
413 do_jump (TREE_OPERAND (exp
, 1),
414 if_false_label
? if_false_label
: drop_through_label
,
415 if_true_label
? if_true_label
: drop_through_label
);
416 /* In case the do_jump just above never jumps. */
417 do_pending_stack_adjust ();
420 /* Now the ELSE-expression. */
421 do_jump (TREE_OPERAND (exp
, 2),
422 if_false_label
? if_false_label
: drop_through_label
,
423 if_true_label
? if_true_label
: drop_through_label
);
424 end_cleanup_deferral ();
/* EQ_EXPR: complex operands decompose into real/imag comparisons;
   wide integers go word-by-word; otherwise a direct compare.  */
430 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
432 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
433 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
435 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
436 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
439 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
440 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
441 fold (build1 (REALPART_EXPR
,
442 TREE_TYPE (inner_type
),
444 fold (build1 (REALPART_EXPR
,
445 TREE_TYPE (inner_type
),
447 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
448 fold (build1 (IMAGPART_EXPR
,
449 TREE_TYPE (inner_type
),
451 fold (build1 (IMAGPART_EXPR
,
452 TREE_TYPE (inner_type
),
454 if_false_label
, if_true_label
);
457 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
458 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
460 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
461 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
462 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
464 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
/* NE_EXPR: mirror of the EQ_EXPR case above.  */
470 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
472 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
473 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
475 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
476 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
479 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
480 fold (build (NE_EXPR
, TREE_TYPE (exp
),
481 fold (build1 (REALPART_EXPR
,
482 TREE_TYPE (inner_type
),
484 fold (build1 (REALPART_EXPR
,
485 TREE_TYPE (inner_type
),
487 fold (build (NE_EXPR
, TREE_TYPE (exp
),
488 fold (build1 (IMAGPART_EXPR
,
489 TREE_TYPE (inner_type
),
491 fold (build1 (IMAGPART_EXPR
,
492 TREE_TYPE (inner_type
),
494 if_false_label
, if_true_label
);
497 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
498 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
500 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
501 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
502 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
504 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
/* LT/LE/GT/GE: fall back to word-at-a-time compares when the target
   cannot compare this integer mode directly.  */
509 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
510 if (GET_MODE_CLASS (mode
) == MODE_INT
511 && ! can_compare_p (LT
, mode
, ccp_jump
))
512 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
514 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
518 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
519 if (GET_MODE_CLASS (mode
) == MODE_INT
520 && ! can_compare_p (LE
, mode
, ccp_jump
))
521 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
523 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
527 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
528 if (GET_MODE_CLASS (mode
) == MODE_INT
529 && ! can_compare_p (GT
, mode
, ccp_jump
))
530 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
532 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
536 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
537 if (GET_MODE_CLASS (mode
) == MODE_INT
538 && ! can_compare_p (GE
, mode
, ccp_jump
))
539 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
541 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
/* UNORDERED_EXPR / ORDERED_EXPR: pick whichever of the pair the
   target can compare, reversing labels if we use the complement.  */
547 enum rtx_code cmp
, rcmp
;
550 if (code
== UNORDERED_EXPR
)
551 cmp
= UNORDERED
, rcmp
= ORDERED
;
553 cmp
= ORDERED
, rcmp
= UNORDERED
;
554 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
557 if (! can_compare_p (cmp
, mode
, ccp_jump
)
558 && (can_compare_p (rcmp
, mode
, ccp_jump
)
559 /* If the target doesn't provide either UNORDERED or ORDERED
560 comparisons, canonicalize on UNORDERED for the library. */
561 || rcmp
== UNORDERED
))
565 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
567 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
/* Unordered relational codes (UNLT etc.): use the combined rtx code
   when supported, otherwise decompose.  Mapping lines from tree code
   to RCODE1/TCODE2 are missing in this extraction -- see upstream.  */
572 enum rtx_code rcode1
;
573 enum tree_code tcode2
;
597 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
598 if (can_compare_p (rcode1
, mode
, ccp_jump
))
599 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
603 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
604 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
607 /* If the target doesn't support combined unordered
608 compares, decompose into UNORDERED + comparison. */
609 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
610 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
611 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
612 do_jump (exp
, if_false_label
, if_true_label
);
618 __builtin_expect (<test>, 0) and
619 __builtin_expect (<test>, 1)
621 We need to do this here, so that <test> is not converted to a SCC
622 operation on machines that use condition code registers and COMPARE
623 like the PowerPC, and then the jump is done based on whether the SCC
624 operation produced a 1 or 0. */
626 /* Check for a built-in function. */
628 tree fndecl
= get_callee_fndecl (exp
);
629 tree arglist
= TREE_OPERAND (exp
, 1);
632 && DECL_BUILT_IN (fndecl
)
633 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
634 && arglist
!= NULL_TREE
635 && TREE_CHAIN (arglist
) != NULL_TREE
)
637 rtx seq
= expand_builtin_expect_jump (exp
, if_false_label
,
647 /* Fall through and generate the normal code. */
/* Default case: evaluate EXP and compare the result against zero.  */
651 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
653 /* This is not needed any more and causes poor code since it causes
654 comparisons and tests from non-SI objects to have different code
656 /* Copy to register to avoid generating bad insns by cse
657 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
658 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
659 temp
= copy_to_reg (temp
);
661 do_pending_stack_adjust ();
662 /* Do any postincrements in the expression that was tested. */
665 if (GET_CODE (temp
) == CONST_INT
666 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
667 || GET_CODE (temp
) == LABEL_REF
)
669 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
673 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
674 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
675 /* Note swapping the labels gives us not-equal. */
676 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
677 else if (GET_MODE (temp
) != VOIDmode
)
679 /* The RTL optimizers prefer comparisons against pseudos. */
680 if (GET_CODE (temp
) == SUBREG
)
682 /* Compare promoted variables in their promoted mode. */
683 if (SUBREG_PROMOTED_VAR_P (temp
)
684 && GET_CODE (XEXP (temp
, 0)) == REG
)
685 temp
= XEXP (temp
, 0);
687 temp
= copy_to_reg (temp
);
689 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
690 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
691 GET_MODE (temp
), NULL_RTX
,
692 if_false_label
, if_true_label
);
698 if (drop_through_label
)
700 /* If do_jump produces code that might be jumped around,
701 do any stack adjusts from that code, before the place
702 where control merges in. */
703 do_pending_stack_adjust ();
704 emit_label (drop_through_label
);
708 /* Given a comparison expression EXP for values too wide to be compared
709 with one insn, test the comparison and jump to the appropriate label.
710 The code of EXP is ignored; we always test GT if SWAP is 0,
711 and LT if SWAP is 1. */
714 do_jump_by_parts_greater (tree exp
, int swap
, rtx if_false_label
,
717 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
718 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
719 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
720 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
722 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
);
725 /* Compare OP0 with OP1, word at a time, in mode MODE.
726 UNSIGNEDP says to do unsigned comparison.
727 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
730 do_jump_by_parts_greater_rtx (enum machine_mode mode
, int unsignedp
, rtx op0
,
731 rtx op1
, rtx if_false_label
, rtx if_true_label
)
733 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
734 rtx drop_through_label
= 0;
737 if (! if_true_label
|| ! if_false_label
)
738 drop_through_label
= gen_label_rtx ();
740 if_true_label
= drop_through_label
;
741 if (! if_false_label
)
742 if_false_label
= drop_through_label
;
744 /* Compare a word at a time, high order first. */
745 for (i
= 0; i
< nwords
; i
++)
747 rtx op0_word
, op1_word
;
749 if (WORDS_BIG_ENDIAN
)
751 op0_word
= operand_subword_force (op0
, i
, mode
);
752 op1_word
= operand_subword_force (op1
, i
, mode
);
756 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
757 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
760 /* All but high-order word must be compared as unsigned. */
761 do_compare_rtx_and_jump (op0_word
, op1_word
, GT
,
762 (unsignedp
|| i
> 0), word_mode
, NULL_RTX
,
763 NULL_RTX
, if_true_label
);
765 /* Consider lower words only if these are equal. */
766 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
767 NULL_RTX
, NULL_RTX
, if_false_label
);
771 emit_jump (if_false_label
);
772 if (drop_through_label
)
773 emit_label (drop_through_label
);
776 /* Given an EQ_EXPR expression EXP for values too wide to be compared
777 with one insn, test the comparison and jump to the appropriate label. */
780 do_jump_by_parts_equality (tree exp
, rtx if_false_label
, rtx if_true_label
)
782 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
783 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
784 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
785 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
787 rtx drop_through_label
= 0;
789 if (! if_false_label
)
790 drop_through_label
= if_false_label
= gen_label_rtx ();
792 for (i
= 0; i
< nwords
; i
++)
793 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
794 operand_subword_force (op1
, i
, mode
),
795 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
796 word_mode
, NULL_RTX
, if_false_label
, NULL_RTX
);
799 emit_jump (if_true_label
);
800 if (drop_through_label
)
801 emit_label (drop_through_label
);
804 /* Jump according to whether OP0 is 0.
805 We assume that OP0 has an integer mode that is too wide
806 for the available compare insns. */
809 do_jump_by_parts_equality_rtx (rtx op0
, rtx if_false_label
, rtx if_true_label
)
811 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
814 rtx drop_through_label
= 0;
816 /* The fastest way of doing this comparison on almost any machine is to
817 "or" all the words and compare the result. If all have to be loaded
818 from memory and this is a very wide item, it's possible this may
819 be slower, but that's highly unlikely. */
821 part
= gen_reg_rtx (word_mode
);
822 emit_move_insn (part
, operand_subword_force (op0
, 0, GET_MODE (op0
)));
823 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
824 part
= expand_binop (word_mode
, ior_optab
, part
,
825 operand_subword_force (op0
, i
, GET_MODE (op0
)),
826 part
, 1, OPTAB_WIDEN
);
830 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
831 NULL_RTX
, if_false_label
, if_true_label
);
836 /* If we couldn't do the "or" simply, do this with a series of compares. */
837 if (! if_false_label
)
838 drop_through_label
= if_false_label
= gen_label_rtx ();
840 for (i
= 0; i
< nwords
; i
++)
841 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, GET_MODE (op0
)),
842 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
,
843 if_false_label
, NULL_RTX
);
846 emit_jump (if_true_label
);
848 if (drop_through_label
)
849 emit_label (drop_through_label
);
852 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
853 (including code to compute the values to be compared)
854 and set (CC0) according to the result.
855 The decision as to signed or unsigned comparison must be made by the caller.
857 We force a stack adjustment unless there are currently
858 things pushed on the stack that aren't yet used.
860 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
864 compare_from_rtx (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
865 enum machine_mode mode
, rtx size
)
870 /* If one operand is constant, make it the second one. Only do this
871 if the other operand is not constant as well. */
873 if (swap_commutative_operands_p (op0
, op1
))
878 code
= swap_condition (code
);
/* NOTE(review): the flag_force_mem guard around the two force_not_mem
   calls appears to be missing from this extraction -- confirm upstream.  */
883 op0
= force_not_mem (op0
);
884 op1
= force_not_mem (op1
);
887 do_pending_stack_adjust ();
/* Try to fold the comparison to a constant first.  */
889 ucode
= unsignedp
? unsigned_condition (code
) : code
;
890 if ((tem
= simplify_relational_operation (ucode
, mode
, op0
, op1
)) != 0)
/* NOTE(review): the block below reads like historically-disabled code
   (upstream keeps it inside an #if 0 region whose preprocessor lines are
   missing here) -- verify before assuming it executes.  */
894 /* There's no need to do this now that combine.c can eliminate lots of
895 sign extensions. This can be less efficient in certain cases on other
898 /* If this is a signed equality comparison, we can do it as an
899 unsigned comparison since zero-extension is cheaper than sign
900 extension and comparisons with zero are done as unsigned. This is
901 the case even on machines that can do fast sign extension, since
902 zero-extension is easier to combine with other operations than
903 sign-extension is. If we are comparing against a constant, we must
904 convert it to what it would look like unsigned. */
905 if ((code
== EQ
|| code
== NE
) && ! unsignedp
906 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
908 if (GET_CODE (op1
) == CONST_INT
909 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
910 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
915 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
);
/* NOTE(review): the two returns below are presumably selected by a
   cc0-related conditional whose preprocessor lines are missing here.  */
918 return gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
920 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
924 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
925 The decision as to signed or unsigned comparison must be made by the caller.
927 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
931 do_compare_rtx_and_jump (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
932 enum machine_mode mode
, rtx size
, rtx if_false_label
,
937 int dummy_true_label
= 0;
939 /* Reverse the comparison if that is safe and we want to jump if it is
/* Only safe for non-float modes (NaNs make reversal unsound).  */
941 if (! if_true_label
&& ! FLOAT_MODE_P (mode
))
943 if_true_label
= if_false_label
;
945 code
= reverse_condition (code
);
948 /* If one operand is constant, make it the second one. Only do this
949 if the other operand is not constant as well. */
951 if (swap_commutative_operands_p (op0
, op1
))
956 code
= swap_condition (code
);
961 op0
= force_not_mem (op0
);
962 op1
= force_not_mem (op1
);
965 do_pending_stack_adjust ();
/* If the comparison folds to a constant, emit an unconditional jump
   (or nothing, when the selected label is null).  */
967 ucode
= unsignedp
? unsigned_condition (code
) : code
;
968 if ((tem
= simplify_relational_operation (ucode
, mode
, op0
, op1
)) != 0)
970 if (tem
== const_true_rtx
)
973 emit_jump (if_true_label
);
978 emit_jump (if_false_label
);
/* NOTE(review): the block below reads like historically-disabled code
   (upstream keeps it inside an #if 0 region whose preprocessor lines are
   missing here) -- verify before assuming it executes.  */
984 /* There's no need to do this now that combine.c can eliminate lots of
985 sign extensions. This can be less efficient in certain cases on other
988 /* If this is a signed equality comparison, we can do it as an
989 unsigned comparison since zero-extension is cheaper than sign
990 extension and comparisons with zero are done as unsigned. This is
991 the case even on machines that can do fast sign extension, since
992 zero-extension is easier to combine with other operations than
993 sign-extension is. If we are comparing against a constant, we must
994 convert it to what it would look like unsigned. */
995 if ((code
== EQ
|| code
== NE
) && ! unsignedp
996 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
998 if (GET_CODE (op1
) == CONST_INT
999 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
1000 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
/* Ensure we have a true target for the compare-and-jump insn; a dummy
   label is emitted (below) when the caller supplied none.  */
1005 if (! if_true_label
)
1007 dummy_true_label
= 1;
1008 if_true_label
= gen_label_rtx ();
1011 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
,
1015 emit_jump (if_false_label
);
1016 if (dummy_true_label
)
1017 emit_label (if_true_label
);
1020 /* Generate code for a comparison expression EXP (including code to compute
1021 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
1022 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
1023 generated code will drop through.
1024 SIGNED_CODE should be the rtx operation for this comparison for
1025 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
1027 We force a stack adjustment unless there are currently
1028 things pushed on the stack that aren't yet used. */
1031 do_compare_and_jump (tree exp
, enum rtx_code signed_code
,
1032 enum rtx_code unsigned_code
, rtx if_false_label
,
1037 enum machine_mode mode
;
1041 /* Don't crash if the comparison was erroneous. */
1042 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
1043 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
1046 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
1047 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == ERROR_MARK
)
/* Pick the type/mode in which to do the comparison.  */
1050 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
1051 mode
= TYPE_MODE (type
);
1052 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
1053 && (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
1054 || (GET_MODE_BITSIZE (mode
)
1055 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
,
1058 /* op0 might have been replaced by promoted constant, in which
1059 case the type of second argument should be used. */
1060 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
1061 mode
= TYPE_MODE (type
);
/* Select the signed or unsigned rtx comparison code.  */
1063 unsignedp
= TREE_UNSIGNED (type
);
1064 code
= unsignedp
? unsigned_code
: signed_code
;
1066 #ifdef HAVE_canonicalize_funcptr_for_compare
1067 /* If function pointers need to be "canonicalized" before they can
1068 be reliably compared, then canonicalize them. */
1069 if (HAVE_canonicalize_funcptr_for_compare
1070 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
1071 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1074 rtx new_op0
= gen_reg_rtx (mode
);
1076 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
1080 if (HAVE_canonicalize_funcptr_for_compare
1081 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
1082 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
1085 rtx new_op1
= gen_reg_rtx (mode
);
1087 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
/* NOTE(review): the closing #endif of the block above is missing from
   this extraction -- confirm its placement against the upstream file.  */
1092 /* Do any postincrements in the expression that was tested. */
/* Hand off to the rtx-level helper; SIZE is only meaningful for
   BLKmode comparisons (hence the conditional expr_size).  */
1095 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
1097 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
1098 if_false_label
, if_true_label
);
1101 #include "gt-dojump.h"