1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "langhooks.h"
/* Forward declarations for the static comparison helpers defined later
   in this file.  NOTE(review): the do_compare_and_jump prototype below
   is truncated in this extract (its trailing parameters, original line
   40, are elided) -- confirm against the complete source.  */
37 static void do_jump_by_parts_greater (tree
, int, rtx
, rtx
);
38 static void do_jump_by_parts_equality (tree
, rtx
, rtx
);
39 static void do_compare_and_jump (tree
, enum rtx_code
, enum rtx_code
, rtx
,
42 /* At the start of a function, record that we have no previously-pushed
43 arguments waiting to be popped. */
46 init_pending_stack_adjust (void)
48 pending_stack_adjust
= 0;
51 /* When exiting from function, if safe, clear out any pending stack adjust
52 so the adjustment won't get done.
54 Note, if the current function calls alloca, then it must have a
55 frame pointer regardless of the value of flag_omit_frame_pointer. */
/* NOTE(review): the function's return type, opening brace and the
   leading clauses of the guard condition (original lines 59-62) are
   elided in this extract; only the trailing clauses of the condition
   and the body assignment survive below.  Confirm the full guard
   against the complete source before editing.  */
58 clear_pending_stack_adjust (void)
/* Keep the adjustment when a frame pointer may be omitted, unless the
   function calls alloca (which forces a frame pointer -- see the
   header comment above).  */
61 && (! flag_omit_frame_pointer
|| current_function_calls_alloca
)
63 && ! (DECL_INLINE (current_function_decl
) && ! flag_no_inline
)
64 && ! flag_inline_functions
)
/* Fold the pending adjustment into the tracked stack-pointer delta,
   then forget it so do_pending_stack_adjust emits nothing.  */
66 stack_pointer_delta
-= pending_stack_adjust
,
67 pending_stack_adjust
= 0;
71 /* Pop any previously-pushed arguments that have not been popped yet. */
74 do_pending_stack_adjust (void)
76 if (inhibit_defer_pop
== 0)
78 if (pending_stack_adjust
!= 0)
79 adjust_stack (GEN_INT (pending_stack_adjust
));
80 pending_stack_adjust
= 0;
84 /* Expand conditional expressions. */
86 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
87 LABEL is an rtx of code CODE_LABEL, in this function and all the
91 jumpifnot (tree exp
, rtx label
)
93 do_jump (exp
, label
, NULL_RTX
);
96 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
99 jumpif (tree exp
, rtx label
)
101 do_jump (exp
, NULL_RTX
, label
);
104 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
105 the result is zero, or IF_TRUE_LABEL if the result is one.
106 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
107 meaning fall through in that case.
109 do_jump always does any pending stack adjust except when it does not
110 actually perform a jump. An example where there is no jump
111 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
113 This function is responsible for optimizing cases such as
114 &&, || and comparison operators in EXP. */
/* Expand EXP and emit a conditional jump: to IF_FALSE_LABEL when the
   value is zero, to IF_TRUE_LABEL when it is one (see the full contract
   in the comment preceding this function).  NOTE(review): this extract
   is fragmentary -- the switch statement, most of its case labels,
   braces and several statements (the embedded original line numbers
   below are discontinuous) were lost in extraction.  Code is preserved
   byte-for-byte; only comments are added.  */
117 do_jump (tree exp
, rtx if_false_label
, rtx if_true_label
)
119 enum tree_code code
= TREE_CODE (exp
);
120 /* Some cases need to create a label to jump to
121 in order to properly fall through.
122 These cases set DROP_THROUGH_LABEL nonzero. */
123 rtx drop_through_label
= 0;
127 enum machine_mode mode
;
/* Constant operand: the jump target is known at compile time.  */
137 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
143 /* This is not true with #pragma weak */
145 /* The address of something can never be zero. */
147 emit_jump (if_true_label
);
/* NOTE(review): case label elided -- presumably the UNSAVE_EXPR case
   (jump on the operand, then unsave it via the language hook); confirm
   against the full source.  */
152 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
153 TREE_OPERAND (exp
, 0)
154 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
158 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
159 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
160 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
161 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
164 /* If we are narrowing the operand, we have to do the compare in the
166 if ((TYPE_PRECISION (TREE_TYPE (exp
))
167 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
169 case NON_LVALUE_EXPR
:
175 /* These cannot change zero->nonzero or vice versa. */
176 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
179 case WITH_RECORD_EXPR
:
180 /* Put the object on the placeholder list, recurse through our first
181 operand, and pop the list. */
182 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
184 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
185 placeholder_list
= TREE_CHAIN (placeholder_list
);
189 /* This is never less insns than evaluating the PLUS_EXPR followed by
190 a test and can be longer if the test is eliminated. */
192 /* Reduce to minus. */
193 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
194 TREE_OPERAND (exp
, 0),
195 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
196 TREE_OPERAND (exp
, 1))));
197 /* Process as MINUS. */
201 /* Nonzero iff operands of minus differ. */
202 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
203 TREE_OPERAND (exp
, 0),
204 TREE_OPERAND (exp
, 1)),
205 NE
, NE
, if_false_label
, if_true_label
);
209 /* If we are AND'ing with a small constant, do this comparison in the
210 smallest type that fits. If the machine doesn't have comparisons
211 that small, it will be converted back to the wider comparison.
212 This helps if we are testing the sign bit of a narrower object.
213 combine can't do this for us because it can't know whether a
214 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
216 if (! SLOW_BYTE_ACCESS
217 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
218 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
219 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
220 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
221 && (type
= (*lang_hooks
.types
.type_for_mode
) (mode
, 1)) != 0
222 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
223 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
224 != CODE_FOR_nothing
))
226 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
/* Labels swapped below: jumping on the operand with true/false targets
   exchanged implements logical negation (case label elided here).  */
232 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
235 case TRUTH_ANDIF_EXPR
:
/* Short-circuit AND: if operand 0 is false, jump straight to the false
   label; only evaluate operand 1 when operand 0 was nonzero.  */
236 if (if_false_label
== 0)
237 if_false_label
= drop_through_label
= gen_label_rtx ();
238 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
239 start_cleanup_deferral ();
240 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
241 end_cleanup_deferral ();
244 case TRUTH_ORIF_EXPR
:
/* Short-circuit OR: mirror image of TRUTH_ANDIF_EXPR above.  */
245 if (if_true_label
== 0)
246 if_true_label
= drop_through_label
= gen_label_rtx ();
247 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
248 start_cleanup_deferral ();
249 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
250 end_cleanup_deferral ();
/* Comma expression: evaluate operand 0 for side effects only, then
   jump on operand 1 (case label elided).  */
255 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
256 preserve_temp_slots (NULL_RTX
);
260 do_pending_stack_adjust ();
261 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
267 case ARRAY_RANGE_REF
:
269 HOST_WIDE_INT bitsize
, bitpos
;
271 enum machine_mode mode
;
276 /* Get description of this reference. We don't actually care
277 about the underlying object here. */
278 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
279 &unsignedp
, &volatilep
);
281 type
= (*lang_hooks
.types
.type_for_size
) (bitsize
, unsignedp
);
282 if (! SLOW_BYTE_ACCESS
283 && type
!= 0 && bitsize
>= 0
284 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
285 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
286 != CODE_FOR_nothing
))
288 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
/* COND_EXPR handling (case label elided).  */
295 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
296 if (integer_onep (TREE_OPERAND (exp
, 1))
297 && integer_zerop (TREE_OPERAND (exp
, 2)))
298 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
300 else if (integer_zerop (TREE_OPERAND (exp
, 1))
301 && integer_onep (TREE_OPERAND (exp
, 2)))
302 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
306 rtx label1
= gen_label_rtx ();
307 drop_through_label
= gen_label_rtx ();
309 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
311 start_cleanup_deferral ();
312 /* Now the THEN-expression. */
313 do_jump (TREE_OPERAND (exp
, 1),
314 if_false_label
? if_false_label
: drop_through_label
,
315 if_true_label
? if_true_label
: drop_through_label
);
316 /* In case the do_jump just above never jumps. */
317 do_pending_stack_adjust ();
320 /* Now the ELSE-expression. */
321 do_jump (TREE_OPERAND (exp
, 2),
322 if_false_label
? if_false_label
: drop_through_label
,
323 if_true_label
? if_true_label
: drop_through_label
);
324 end_cleanup_deferral ();
/* Equality comparison (EQ_EXPR -- case label elided): complex modes
   decompose into real-part AND imag-part equality.  */
330 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
332 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
333 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
335 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
336 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
339 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
340 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
341 fold (build1 (REALPART_EXPR
,
342 TREE_TYPE (inner_type
),
344 fold (build1 (REALPART_EXPR
,
345 TREE_TYPE (inner_type
),
347 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
348 fold (build1 (IMAGPART_EXPR
,
349 TREE_TYPE (inner_type
),
351 fold (build1 (IMAGPART_EXPR
,
352 TREE_TYPE (inner_type
),
354 if_false_label
, if_true_label
);
357 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
358 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
360 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
361 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
362 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
364 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
/* Inequality comparison (NE_EXPR -- case label elided): complex modes
   decompose into real-part OR imag-part inequality.  */
370 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
372 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
373 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
375 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
376 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
379 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
380 fold (build (NE_EXPR
, TREE_TYPE (exp
),
381 fold (build1 (REALPART_EXPR
,
382 TREE_TYPE (inner_type
),
384 fold (build1 (REALPART_EXPR
,
385 TREE_TYPE (inner_type
),
387 fold (build (NE_EXPR
, TREE_TYPE (exp
),
388 fold (build1 (IMAGPART_EXPR
,
389 TREE_TYPE (inner_type
),
391 fold (build1 (IMAGPART_EXPR
,
392 TREE_TYPE (inner_type
),
394 if_false_label
, if_true_label
);
397 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
398 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
400 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
401 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
402 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
404 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
/* Ordering comparisons (LT/LE/GT/GE -- case labels elided).  Each
   falls back to a word-by-word comparison when the target cannot
   compare the full integer mode in one insn; the second rtx code is
   the unsigned variant used when the type is unsigned.  */
409 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
410 if (GET_MODE_CLASS (mode
) == MODE_INT
411 && ! can_compare_p (LT
, mode
, ccp_jump
))
412 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
414 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
418 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
419 if (GET_MODE_CLASS (mode
) == MODE_INT
420 && ! can_compare_p (LE
, mode
, ccp_jump
))
421 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
423 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
427 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
428 if (GET_MODE_CLASS (mode
) == MODE_INT
429 && ! can_compare_p (GT
, mode
, ccp_jump
))
430 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
432 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
436 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
437 if (GET_MODE_CLASS (mode
) == MODE_INT
438 && ! can_compare_p (GE
, mode
, ccp_jump
))
439 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
441 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
/* UNORDERED_EXPR / ORDERED_EXPR: use whichever of the pair the target
   supports, inverting the labels when using the reversed code.  */
447 enum rtx_code cmp
, rcmp
;
450 if (code
== UNORDERED_EXPR
)
451 cmp
= UNORDERED
, rcmp
= ORDERED
;
453 cmp
= ORDERED
, rcmp
= UNORDERED
;
454 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
457 if (! can_compare_p (cmp
, mode
, ccp_jump
)
458 && (can_compare_p (rcmp
, mode
, ccp_jump
)
459 /* If the target doesn't provide either UNORDERED or ORDERED
460 comparisons, canonicalize on UNORDERED for the library. */
461 || rcmp
== UNORDERED
))
465 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
467 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
/* Unordered-or-comparison codes (UNLT etc. -- the mapping to rcode1 /
   tcode2 is elided here): use the combined rtx code when the target
   supports it, otherwise decompose.  */
472 enum rtx_code rcode1
;
473 enum tree_code tcode2
;
497 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
498 if (can_compare_p (rcode1
, mode
, ccp_jump
))
499 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
503 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
504 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
507 /* If the target doesn't support combined unordered
508 compares, decompose into UNORDERED + comparison. */
509 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
510 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
511 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
512 do_jump (exp
, if_false_label
, if_true_label
);
518 __builtin_expect (<test>, 0) and
519 __builtin_expect (<test>, 1)
521 We need to do this here, so that <test> is not converted to a SCC
522 operation on machines that use condition code registers and COMPARE
523 like the PowerPC, and then the jump is done based on whether the SCC
524 operation produced a 1 or 0. */
526 /* Check for a built-in function. */
528 tree fndecl
= get_callee_fndecl (exp
);
529 tree arglist
= TREE_OPERAND (exp
, 1);
532 && DECL_BUILT_IN (fndecl
)
533 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
534 && arglist
!= NULL_TREE
535 && TREE_CHAIN (arglist
) != NULL_TREE
)
537 rtx seq
= expand_builtin_expect_jump (exp
, if_false_label
,
547 /* Fall through and generate the normal code. */
/* Default handling: expand EXP to an rtx and compare it against zero
   (the `default:' label itself is elided in this extract).  */
551 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
553 /* This is not needed any more and causes poor code since it causes
554 comparisons and tests from non-SI objects to have different code
556 /* Copy to register to avoid generating bad insns by cse
557 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
558 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
559 temp
= copy_to_reg (temp
);
561 do_pending_stack_adjust ();
562 /* Do any postincrements in the expression that was tested. */
565 if (GET_CODE (temp
) == CONST_INT
566 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
567 || GET_CODE (temp
) == LABEL_REF
)
569 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
573 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
574 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
575 /* Note swapping the labels gives us not-equal. */
576 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
577 else if (GET_MODE (temp
) != VOIDmode
)
579 /* The RTL optimizers prefer comparisons against pseudos. */
580 if (GET_CODE (temp
) == SUBREG
)
582 /* Compare promoted variables in their promoted mode. */
583 if (SUBREG_PROMOTED_VAR_P (temp
)
584 && GET_CODE (XEXP (temp
, 0)) == REG
)
585 temp
= XEXP (temp
, 0);
587 temp
= copy_to_reg (temp
);
589 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
590 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
591 GET_MODE (temp
), NULL_RTX
,
592 if_false_label
, if_true_label
);
598 if (drop_through_label
)
600 /* If do_jump produces code that might be jumped around,
601 do any stack adjusts from that code, before the place
602 where control merges in. */
603 do_pending_stack_adjust ();
604 emit_label (drop_through_label
);
608 /* Given a comparison expression EXP for values too wide to be compared
609 with one insn, test the comparison and jump to the appropriate label.
610 The code of EXP is ignored; we always test GT if SWAP is 0,
611 and LT if SWAP is 1. */
/* Expand the operands of comparison EXP and dispatch to the rtx-level
   word-by-word greater-than helper.  SWAP selects which operand is
   expanded first (SWAP==1 effectively tests LT instead of GT -- see the
   comment preceding this function).  NOTE(review): the signature is
   truncated in this extract; the final parameter (rtx if_true_label,
   original line 615) and the braces are elided.  */
614 do_jump_by_parts_greater (tree exp
, int swap
, rtx if_false_label
,
617 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
618 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
619 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
620 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
622 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
);
625 /* Compare OP0 with OP1, word at a time, in mode MODE.
626 UNSIGNEDP says to do unsigned comparison.
627 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
/* Compare OP0 with OP1 one word at a time (high-order word first) and
   jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.
   Either label may be null, in which case a drop-through label is
   substituted.  NOTE(review): braces and the `else' between the
   endianness arms (original lines 654-655) are elided in this extract.  */
630 do_jump_by_parts_greater_rtx (enum machine_mode mode
, int unsignedp
, rtx op0
,
631 rtx op1
, rtx if_false_label
, rtx if_true_label
)
633 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
634 rtx drop_through_label
= 0;
/* Substitute a shared drop-through label for any missing target.  */
637 if (! if_true_label
|| ! if_false_label
)
638 drop_through_label
= gen_label_rtx ();
640 if_true_label
= drop_through_label
;
641 if (! if_false_label
)
642 if_false_label
= drop_through_label
;
644 /* Compare a word at a time, high order first. */
645 for (i
= 0; i
< nwords
; i
++)
647 rtx op0_word
, op1_word
;
/* Pick subwords so the highest-order word is compared first on either
   endianness.  */
649 if (WORDS_BIG_ENDIAN
)
651 op0_word
= operand_subword_force (op0
, i
, mode
);
652 op1_word
= operand_subword_force (op1
, i
, mode
);
656 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
657 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
660 /* All but high-order word must be compared as unsigned. */
661 do_compare_rtx_and_jump (op0_word
, op1_word
, GT
,
662 (unsignedp
|| i
> 0), word_mode
, NULL_RTX
,
663 NULL_RTX
, if_true_label
);
665 /* Consider lower words only if these are equal. */
666 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
667 NULL_RTX
, NULL_RTX
, if_false_label
);
/* All words compared equal: OP0 is not greater than OP1.  */
671 emit_jump (if_false_label
);
672 if (drop_through_label
)
673 emit_label (drop_through_label
);
676 /* Given an EQ_EXPR expression EXP for values too wide to be compared
677 with one insn, test the comparison and jump to the appropriate label. */
/* Given an EQ_EXPR EXP whose operands are too wide to compare with one
   insn, compare them word by word: jump to IF_FALSE_LABEL on the first
   unequal word, and to IF_TRUE_LABEL if every word matched.
   NOTE(review): braces and the guard before the final emit_jump
   (original line 698, presumably `if (if_true_label)') are elided in
   this extract -- confirm against the full source.  */
680 do_jump_by_parts_equality (tree exp
, rtx if_false_label
, rtx if_true_label
)
682 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
683 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
684 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
685 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
687 rtx drop_through_label
= 0;
689 if (! if_false_label
)
690 drop_through_label
= if_false_label
= gen_label_rtx ();
692 for (i
= 0; i
< nwords
; i
++)
693 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
694 operand_subword_force (op1
, i
, mode
),
695 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
696 word_mode
, NULL_RTX
, if_false_label
, NULL_RTX
);
699 emit_jump (if_true_label
);
700 if (drop_through_label
)
701 emit_label (drop_through_label
);
704 /* Jump according to whether OP0 is 0.
705 We assume that OP0 has an integer mode that is too wide
706 for the available compare insns. */
/* Jump according to whether OP0 is zero, for an integer mode too wide
   for the available compare insns: OR all the words together and
   compare the result against zero, falling back to one compare per
   word when the OR cannot be formed.  NOTE(review): braces and the
   surrounding control structure (including the branch between the two
   strategies) are partially elided in this extract.  */
709 do_jump_by_parts_equality_rtx (rtx op0
, rtx if_false_label
, rtx if_true_label
)
711 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
714 rtx drop_through_label
= 0;
716 /* The fastest way of doing this comparison on almost any machine is to
717 "or" all the words and compare the result. If all have to be loaded
718 from memory and this is a very wide item, it's possible this may
719 be slower, but that's highly unlikely. */
721 part
= gen_reg_rtx (word_mode
);
722 emit_move_insn (part
, operand_subword_force (op0
, 0, GET_MODE (op0
)));
723 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
724 part
= expand_binop (word_mode
, ior_optab
, part
,
725 operand_subword_force (op0
, i
, GET_MODE (op0
)),
726 part
, 1, OPTAB_WIDEN
);
/* If the OR succeeded, one unsigned compare of the combined word
   decides the jump.  */
730 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
731 NULL_RTX
, if_false_label
, if_true_label
);
736 /* If we couldn't do the "or" simply, do this with a series of compares. */
737 if (! if_false_label
)
738 drop_through_label
= if_false_label
= gen_label_rtx ();
740 for (i
= 0; i
< nwords
; i
++)
741 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, GET_MODE (op0
)),
742 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
,
743 if_false_label
, NULL_RTX
);
746 emit_jump (if_true_label
);
748 if (drop_through_label
)
749 emit_label (drop_through_label
);
752 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
753 (including code to compute the values to be compared)
754 and set (CC0) according to the result.
755 The decision as to signed or unsigned comparison must be made by the caller.
757 We force a stack adjustment unless there are currently
758 things pushed on the stack that aren't yet used.
760 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
/* Generate an rtx comparison of OP0 and OP1 with code CODE, emitting
   the compare insn and returning the condition rtx (against cc0 on
   cc0 targets, else directly on the operands).  Signedness is decided
   by the caller via UNSIGNEDP; SIZE gives the object size when MODE is
   BLKmode.  NOTE(review): local declarations (tem, ucode), several
   statements inside the swap branch, and the branch structure around
   the two returns (original lines elided) are missing from this
   extract.  */
764 compare_from_rtx (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
765 enum machine_mode mode
, rtx size
)
770 /* If one operand is constant, make it the second one. Only do this
771 if the other operand is not constant as well. */
773 if (swap_commutative_operands_p (op0
, op1
))
778 code
= swap_condition (code
);
/* NOTE(review): presumably guarded by `if (flag_force_mem)' or
   similar (elided) -- confirm before relying on this.  */
783 op0
= force_not_mem (op0
);
784 op1
= force_not_mem (op1
);
787 do_pending_stack_adjust ();
/* Try to fold the comparison to a constant before emitting insns.  */
789 ucode
= unsignedp
? unsigned_condition (code
) : code
;
790 if ((tem
= simplify_relational_operation (ucode
, mode
, op0
, op1
)) != 0)
794 /* There's no need to do this now that combine.c can eliminate lots of
795 sign extensions. This can be less efficient in certain cases on other
798 /* If this is a signed equality comparison, we can do it as an
799 unsigned comparison since zero-extension is cheaper than sign
800 extension and comparisons with zero are done as unsigned. This is
801 the case even on machines that can do fast sign extension, since
802 zero-extension is easier to combine with other operations than
803 sign-extension is. If we are comparing against a constant, we must
804 convert it to what it would look like unsigned. */
805 if ((code
== EQ
|| code
== NE
) && ! unsignedp
806 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
808 if (GET_CODE (op1
) == CONST_INT
809 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
810 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
815 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
);
818 return gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
820 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
824 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
825 The decision as to signed or unsigned comparison must be made by the caller.
827 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
/* Like do_compare_and_jump but for two rtx operands: compare OP0 and
   OP1 with CODE and jump to the appropriate label.  Signedness is the
   caller's decision; SIZE is the object size for BLKmode.
   NOTE(review): the signature is truncated (the trailing
   rtx if_true_label parameter, original line 833, is elided), as are
   several braces and else-branches throughout.  */
831 do_compare_rtx_and_jump (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
832 enum machine_mode mode
, rtx size
, rtx if_false_label
,
837 int dummy_true_label
= 0;
839 /* Reverse the comparison if that is safe and we want to jump if it is
841 if (! if_true_label
&& ! FLOAT_MODE_P (mode
))
843 if_true_label
= if_false_label
;
845 code
= reverse_condition (code
);
848 /* If one operand is constant, make it the second one. Only do this
849 if the other operand is not constant as well. */
851 if (swap_commutative_operands_p (op0
, op1
))
856 code
= swap_condition (code
);
/* NOTE(review): presumably guarded by `if (flag_force_mem)' or
   similar (elided) -- confirm before relying on this.  */
861 op0
= force_not_mem (op0
);
862 op1
= force_not_mem (op1
);
865 do_pending_stack_adjust ();
/* If the comparison folds to a constant, emit an unconditional jump
   (or nothing) instead of a compare-and-branch.  */
867 ucode
= unsignedp
? unsigned_condition (code
) : code
;
868 if ((tem
= simplify_relational_operation (ucode
, mode
, op0
, op1
)) != 0)
870 if (tem
== const_true_rtx
)
873 emit_jump (if_true_label
);
878 emit_jump (if_false_label
);
884 /* There's no need to do this now that combine.c can eliminate lots of
885 sign extensions. This can be less efficient in certain cases on other
888 /* If this is a signed equality comparison, we can do it as an
889 unsigned comparison since zero-extension is cheaper than sign
890 extension and comparisons with zero are done as unsigned. This is
891 the case even on machines that can do fast sign extension, since
892 zero-extension is easier to combine with other operations than
893 sign-extension is. If we are comparing against a constant, we must
894 convert it to what it would look like unsigned. */
895 if ((code
== EQ
|| code
== NE
) && ! unsignedp
896 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
898 if (GET_CODE (op1
) == CONST_INT
899 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
900 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
/* emit_cmp_and_jump_insns needs a true label; synthesize one when the
   caller passed none, and emit it after the fall-through jump below.  */
907 dummy_true_label
= 1;
908 if_true_label
= gen_label_rtx ();
911 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
,
915 emit_jump (if_false_label
);
916 if (dummy_true_label
)
917 emit_label (if_true_label
);
920 /* Generate code for a comparison expression EXP (including code to compute
921 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
922 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
923 generated code will drop through.
924 SIGNED_CODE should be the rtx operation for this comparison for
925 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
927 We force a stack adjustment unless there are currently
928 things pushed on the stack that aren't yet used. */
/* Expand comparison expression EXP and emit a conditional jump, using
   SIGNED_CODE or UNSIGNED_CODE as the rtx comparison depending on the
   signedness of the operand type.  NOTE(review): the signature is
   truncated (the trailing rtx if_true_label parameter, original line
   933, is elided), as are the early returns after the ERROR_MARK
   checks and the closing of the funcptr-canonicalization blocks.  */
931 do_compare_and_jump (tree exp
, enum rtx_code signed_code
,
932 enum rtx_code unsigned_code
, rtx if_false_label
,
937 enum machine_mode mode
;
941 /* Don't crash if the comparison was erroneous. */
942 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
943 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
946 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
947 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == ERROR_MARK
)
950 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
951 mode
= TYPE_MODE (type
);
952 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
953 && (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
954 || (GET_MODE_BITSIZE (mode
)
955 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
,
958 /* op0 might have been replaced by promoted constant, in which
959 case the type of second argument should be used. */
960 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
961 mode
= TYPE_MODE (type
);
963 unsignedp
= TREE_UNSIGNED (type
);
964 code
= unsignedp
? unsigned_code
: signed_code
;
966 #ifdef HAVE_canonicalize_funcptr_for_compare
967 /* If function pointers need to be "canonicalized" before they can
968 be reliably compared, then canonicalize them. */
969 if (HAVE_canonicalize_funcptr_for_compare
970 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
971 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
974 rtx new_op0
= gen_reg_rtx (mode
);
976 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
980 if (HAVE_canonicalize_funcptr_for_compare
981 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
982 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
985 rtx new_op1
= gen_reg_rtx (mode
);
987 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
992 /* Do any postincrements in the expression that was tested. */
995 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
997 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
998 if_false_label
, if_true_label
);