/* Extracted from official-gcc.git: gcc/dojump.c
   (blob e7afb7ef0a8540b02150b82d7bfd3c82beef78d4, commit "Daily bump.").  */
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
33 #include "expr.h"
34 #include "optabs.h"
35 #include "langhooks.h"
36 #include "ggc.h"
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
42 rtx);
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
47 void
48 init_pending_stack_adjust (void)
50 pending_stack_adjust = 0;
53 /* Discard any pending stack adjustment. This avoid relying on the
54 RTL optimizers to remove useless adjustments when we know the
55 stack pointer value is dead. */
56 void
57 discard_pending_stack_adjust (void)
59 stack_pointer_delta -= pending_stack_adjust;
60 pending_stack_adjust = 0;
63 /* When exiting from function, if safe, clear out any pending stack adjust
64 so the adjustment won't get done.
66 Note, if the current function calls alloca, then it must have a
67 frame pointer regardless of the value of flag_omit_frame_pointer. */
69 void
70 clear_pending_stack_adjust (void)
72 if (optimize > 0
73 && (! flag_omit_frame_pointer || current_function_calls_alloca)
74 && EXIT_IGNORE_STACK
75 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
76 discard_pending_stack_adjust ();
79 /* Pop any previously-pushed arguments that have not been popped yet. */
81 void
82 do_pending_stack_adjust (void)
84 if (inhibit_defer_pop == 0)
86 if (pending_stack_adjust != 0)
87 adjust_stack (GEN_INT (pending_stack_adjust));
88 pending_stack_adjust = 0;
92 /* Expand conditional expressions. */
94 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
95 LABEL is an rtx of code CODE_LABEL, in this function and all the
96 functions here. */
98 void
99 jumpifnot (tree exp, rtx label)
101 do_jump (exp, label, NULL_RTX);
104 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
106 void
107 jumpif (tree exp, rtx label)
109 do_jump (exp, NULL_RTX, label);
112 /* Used internally by prefer_and_bit_test. */
114 static GTY(()) rtx and_reg;
115 static GTY(()) rtx and_test;
116 static GTY(()) rtx shift_test;
118 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
119 where X is an arbitrary register of mode MODE. Return true if the former
120 is preferred. */
122 static bool
123 prefer_and_bit_test (enum machine_mode mode, int bitnum)
125 if (and_test == 0)
127 /* Set up rtxes for the two variations. Use NULL as a placeholder
128 for the BITNUM-based constants. */
129 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
130 and_test = gen_rtx_AND (mode, and_reg, NULL);
131 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
132 const1_rtx);
134 else
136 /* Change the mode of the previously-created rtxes. */
137 PUT_MODE (and_reg, mode);
138 PUT_MODE (and_test, mode);
139 PUT_MODE (shift_test, mode);
140 PUT_MODE (XEXP (shift_test, 0), mode);
143 /* Fill in the integers. */
144 XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
145 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
147 return (rtx_cost (and_test, IF_THEN_ELSE)
148 <= rtx_cost (shift_test, IF_THEN_ELSE));
151 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
152 the result is zero, or IF_TRUE_LABEL if the result is one.
153 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
154 meaning fall through in that case.
156 do_jump always does any pending stack adjust except when it does not
157 actually perform a jump. An example where there is no jump
158 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. */
160 void
161 do_jump (tree exp, rtx if_false_label, rtx if_true_label)
163 enum tree_code code = TREE_CODE (exp);
164 rtx temp;
165 int i;
166 tree type;
167 enum machine_mode mode;
168 rtx drop_through_label = 0;
170 switch (code)
172 case ERROR_MARK:
173 break;
175 case INTEGER_CST:
176 temp = integer_zerop (exp) ? if_false_label : if_true_label;
177 if (temp)
178 emit_jump (temp);
179 break;
181 #if 0
182 /* This is not true with #pragma weak */
183 case ADDR_EXPR:
184 /* The address of something can never be zero. */
185 if (if_true_label)
186 emit_jump (if_true_label);
187 break;
188 #endif
190 case NOP_EXPR:
191 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
192 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
193 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
194 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
195 goto normal;
196 case CONVERT_EXPR:
197 /* If we are narrowing the operand, we have to do the compare in the
198 narrower mode. */
199 if ((TYPE_PRECISION (TREE_TYPE (exp))
200 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
201 goto normal;
202 case NON_LVALUE_EXPR:
203 case ABS_EXPR:
204 case NEGATE_EXPR:
205 case LROTATE_EXPR:
206 case RROTATE_EXPR:
207 /* These cannot change zero->nonzero or vice versa. */
208 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
209 break;
211 case BIT_AND_EXPR:
212 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
213 See if the former is preferred for jump tests and restore it
214 if so. */
215 if (integer_onep (TREE_OPERAND (exp, 1)))
217 tree exp0 = TREE_OPERAND (exp, 0);
218 rtx set_label, clr_label;
220 /* Strip narrowing integral type conversions. */
221 while ((TREE_CODE (exp0) == NOP_EXPR
222 || TREE_CODE (exp0) == CONVERT_EXPR)
223 && TREE_OPERAND (exp0, 0) != error_mark_node
224 && TYPE_PRECISION (TREE_TYPE (exp0))
225 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
226 exp0 = TREE_OPERAND (exp0, 0);
228 /* "exp0 ^ 1" inverts the sense of the single bit test. */
229 if (TREE_CODE (exp0) == BIT_XOR_EXPR
230 && integer_onep (TREE_OPERAND (exp0, 1)))
232 exp0 = TREE_OPERAND (exp0, 0);
233 clr_label = if_true_label;
234 set_label = if_false_label;
236 else
238 clr_label = if_false_label;
239 set_label = if_true_label;
242 if (TREE_CODE (exp0) == RSHIFT_EXPR)
244 tree arg = TREE_OPERAND (exp0, 0);
245 tree shift = TREE_OPERAND (exp0, 1);
246 tree argtype = TREE_TYPE (arg);
247 if (TREE_CODE (shift) == INTEGER_CST
248 && compare_tree_int (shift, 0) >= 0
249 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
250 && prefer_and_bit_test (TYPE_MODE (argtype),
251 TREE_INT_CST_LOW (shift)))
253 HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
254 << TREE_INT_CST_LOW (shift);
255 do_jump (build2 (BIT_AND_EXPR, argtype, arg,
256 build_int_cst_type (argtype, mask)),
257 clr_label, set_label);
258 break;
263 /* If we are AND'ing with a small constant, do this comparison in the
264 smallest type that fits. If the machine doesn't have comparisons
265 that small, it will be converted back to the wider comparison.
266 This helps if we are testing the sign bit of a narrower object.
267 combine can't do this for us because it can't know whether a
268 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
270 if (! SLOW_BYTE_ACCESS
271 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
272 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
273 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
274 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
275 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
276 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
277 && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
278 != CODE_FOR_nothing))
280 do_jump (fold_convert (type, exp), if_false_label, if_true_label);
281 break;
283 goto normal;
285 case TRUTH_NOT_EXPR:
286 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
287 break;
289 case COND_EXPR:
291 rtx label1 = gen_label_rtx ();
292 if (!if_true_label || !if_false_label)
294 drop_through_label = gen_label_rtx ();
295 if (!if_true_label)
296 if_true_label = drop_through_label;
297 if (!if_false_label)
298 if_false_label = drop_through_label;
301 do_pending_stack_adjust ();
302 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
303 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
304 emit_label (label1);
305 do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
306 break;
309 case TRUTH_ANDIF_EXPR:
310 case TRUTH_ORIF_EXPR:
311 case COMPOUND_EXPR:
312 /* Lowered by gimplify.c. */
313 gcc_unreachable ();
315 case COMPONENT_REF:
316 case BIT_FIELD_REF:
317 case ARRAY_REF:
318 case ARRAY_RANGE_REF:
320 HOST_WIDE_INT bitsize, bitpos;
321 int unsignedp;
322 enum machine_mode mode;
323 tree type;
324 tree offset;
325 int volatilep = 0;
327 /* Get description of this reference. We don't actually care
328 about the underlying object here. */
329 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
330 &unsignedp, &volatilep, false);
332 type = lang_hooks.types.type_for_size (bitsize, unsignedp);
333 if (! SLOW_BYTE_ACCESS
334 && type != 0 && bitsize >= 0
335 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
336 && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
337 != CODE_FOR_nothing))
339 do_jump (fold_convert (type, exp), if_false_label, if_true_label);
340 break;
342 goto normal;
345 case EQ_EXPR:
347 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
349 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
350 != MODE_COMPLEX_FLOAT);
351 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
352 != MODE_COMPLEX_INT);
354 if (integer_zerop (TREE_OPERAND (exp, 1)))
355 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
356 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
357 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
358 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
359 else
360 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
361 break;
364 case MINUS_EXPR:
365 /* Nonzero iff operands of minus differ. */
366 exp = build2 (NE_EXPR, TREE_TYPE (exp),
367 TREE_OPERAND (exp, 0),
368 TREE_OPERAND (exp, 1));
369 /* FALLTHRU */
370 case NE_EXPR:
372 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
374 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
375 != MODE_COMPLEX_FLOAT);
376 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
377 != MODE_COMPLEX_INT);
379 if (integer_zerop (TREE_OPERAND (exp, 1)))
380 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
381 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
382 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
383 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
384 else
385 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
386 break;
389 case LT_EXPR:
390 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
391 if (GET_MODE_CLASS (mode) == MODE_INT
392 && ! can_compare_p (LT, mode, ccp_jump))
393 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
394 else
395 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
396 break;
398 case LE_EXPR:
399 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
400 if (GET_MODE_CLASS (mode) == MODE_INT
401 && ! can_compare_p (LE, mode, ccp_jump))
402 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
403 else
404 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
405 break;
407 case GT_EXPR:
408 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
409 if (GET_MODE_CLASS (mode) == MODE_INT
410 && ! can_compare_p (GT, mode, ccp_jump))
411 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
412 else
413 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
414 break;
416 case GE_EXPR:
417 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
418 if (GET_MODE_CLASS (mode) == MODE_INT
419 && ! can_compare_p (GE, mode, ccp_jump))
420 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
421 else
422 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
423 break;
425 case UNORDERED_EXPR:
426 case ORDERED_EXPR:
428 enum rtx_code cmp, rcmp;
429 int do_rev;
431 if (code == UNORDERED_EXPR)
432 cmp = UNORDERED, rcmp = ORDERED;
433 else
434 cmp = ORDERED, rcmp = UNORDERED;
435 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
437 do_rev = 0;
438 if (! can_compare_p (cmp, mode, ccp_jump)
439 && (can_compare_p (rcmp, mode, ccp_jump)
440 /* If the target doesn't provide either UNORDERED or ORDERED
441 comparisons, canonicalize on UNORDERED for the library. */
442 || rcmp == UNORDERED))
443 do_rev = 1;
445 if (! do_rev)
446 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
447 else
448 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
450 break;
453 enum rtx_code rcode1;
454 enum tree_code tcode1, tcode2;
456 case UNLT_EXPR:
457 rcode1 = UNLT;
458 tcode1 = UNORDERED_EXPR;
459 tcode2 = LT_EXPR;
460 goto unordered_bcc;
461 case UNLE_EXPR:
462 rcode1 = UNLE;
463 tcode1 = UNORDERED_EXPR;
464 tcode2 = LE_EXPR;
465 goto unordered_bcc;
466 case UNGT_EXPR:
467 rcode1 = UNGT;
468 tcode1 = UNORDERED_EXPR;
469 tcode2 = GT_EXPR;
470 goto unordered_bcc;
471 case UNGE_EXPR:
472 rcode1 = UNGE;
473 tcode1 = UNORDERED_EXPR;
474 tcode2 = GE_EXPR;
475 goto unordered_bcc;
476 case UNEQ_EXPR:
477 rcode1 = UNEQ;
478 tcode1 = UNORDERED_EXPR;
479 tcode2 = EQ_EXPR;
480 goto unordered_bcc;
481 case LTGT_EXPR:
482 /* It is ok for LTGT_EXPR to trap when the result is unordered,
483 so expand to (a < b) || (a > b). */
484 rcode1 = LTGT;
485 tcode1 = LT_EXPR;
486 tcode2 = GT_EXPR;
487 goto unordered_bcc;
489 unordered_bcc:
490 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
491 if (can_compare_p (rcode1, mode, ccp_jump))
492 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
493 if_true_label);
494 else
496 tree op0 = save_expr (TREE_OPERAND (exp, 0));
497 tree op1 = save_expr (TREE_OPERAND (exp, 1));
498 tree cmp0, cmp1;
500 /* If the target doesn't support combined unordered
501 compares, decompose into two comparisons. */
502 if (if_true_label == 0)
503 drop_through_label = if_true_label = gen_label_rtx ();
505 cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
506 cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
507 do_jump (cmp0, 0, if_true_label);
508 do_jump (cmp1, if_false_label, if_true_label);
511 break;
513 case TRUTH_AND_EXPR:
514 /* High branch cost, expand as the bitwise AND of the conditions.
515 Do the same if the RHS has side effects, because we're effectively
516 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR. */
517 if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
518 goto normal;
520 if (if_false_label == NULL_RTX)
522 drop_through_label = gen_label_rtx ();
523 do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
524 do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
526 else
528 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
529 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
531 break;
533 case TRUTH_OR_EXPR:
534 /* High branch cost, expand as the bitwise OR of the conditions.
535 Do the same if the RHS has side effects, because we're effectively
536 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR. */
537 if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
538 goto normal;
540 if (if_true_label == NULL_RTX)
542 drop_through_label = gen_label_rtx ();
543 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
544 do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
546 else
548 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
549 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
551 break;
553 /* Fall through and generate the normal code. */
554 default:
555 normal:
556 temp = expand_normal (exp);
557 do_pending_stack_adjust ();
558 /* The RTL optimizers prefer comparisons against pseudos. */
559 if (GET_CODE (temp) == SUBREG)
561 /* Compare promoted variables in their promoted mode. */
562 if (SUBREG_PROMOTED_VAR_P (temp)
563 && REG_P (XEXP (temp, 0)))
564 temp = XEXP (temp, 0);
565 else
566 temp = copy_to_reg (temp);
568 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
569 NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
570 GET_MODE (temp), NULL_RTX,
571 if_false_label, if_true_label);
574 if (drop_through_label)
576 do_pending_stack_adjust ();
577 emit_label (drop_through_label);
581 /* Compare OP0 with OP1, word at a time, in mode MODE.
582 UNSIGNEDP says to do unsigned comparison.
583 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
585 static void
586 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
587 rtx op1, rtx if_false_label, rtx if_true_label)
589 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
590 rtx drop_through_label = 0;
591 int i;
593 if (! if_true_label || ! if_false_label)
594 drop_through_label = gen_label_rtx ();
595 if (! if_true_label)
596 if_true_label = drop_through_label;
597 if (! if_false_label)
598 if_false_label = drop_through_label;
600 /* Compare a word at a time, high order first. */
601 for (i = 0; i < nwords; i++)
603 rtx op0_word, op1_word;
605 if (WORDS_BIG_ENDIAN)
607 op0_word = operand_subword_force (op0, i, mode);
608 op1_word = operand_subword_force (op1, i, mode);
610 else
612 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
613 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
616 /* All but high-order word must be compared as unsigned. */
617 do_compare_rtx_and_jump (op0_word, op1_word, GT,
618 (unsignedp || i > 0), word_mode, NULL_RTX,
619 NULL_RTX, if_true_label);
621 /* Consider lower words only if these are equal. */
622 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
623 NULL_RTX, NULL_RTX, if_false_label);
626 if (if_false_label)
627 emit_jump (if_false_label);
628 if (drop_through_label)
629 emit_label (drop_through_label);
632 /* Given a comparison expression EXP for values too wide to be compared
633 with one insn, test the comparison and jump to the appropriate label.
634 The code of EXP is ignored; we always test GT if SWAP is 0,
635 and LT if SWAP is 1. */
637 static void
638 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
639 rtx if_true_label)
641 rtx op0 = expand_normal (TREE_OPERAND (exp, swap));
642 rtx op1 = expand_normal (TREE_OPERAND (exp, !swap));
643 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
644 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
646 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
647 if_true_label);
650 /* Jump according to whether OP0 is 0. We assume that OP0 has an integer
651 mode, MODE, that is too wide for the available compare insns. Either
652 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
653 to indicate drop through. */
655 static void
656 do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
657 rtx if_false_label, rtx if_true_label)
659 int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
660 rtx part;
661 int i;
662 rtx drop_through_label = 0;
664 /* The fastest way of doing this comparison on almost any machine is to
665 "or" all the words and compare the result. If all have to be loaded
666 from memory and this is a very wide item, it's possible this may
667 be slower, but that's highly unlikely. */
669 part = gen_reg_rtx (word_mode);
670 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
671 for (i = 1; i < nwords && part != 0; i++)
672 part = expand_binop (word_mode, ior_optab, part,
673 operand_subword_force (op0, i, GET_MODE (op0)),
674 part, 1, OPTAB_WIDEN);
676 if (part != 0)
678 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
679 NULL_RTX, if_false_label, if_true_label);
681 return;
684 /* If we couldn't do the "or" simply, do this with a series of compares. */
685 if (! if_false_label)
686 drop_through_label = if_false_label = gen_label_rtx ();
688 for (i = 0; i < nwords; i++)
689 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
690 const0_rtx, EQ, 1, word_mode, NULL_RTX,
691 if_false_label, NULL_RTX);
693 if (if_true_label)
694 emit_jump (if_true_label);
696 if (drop_through_label)
697 emit_label (drop_through_label);
700 /* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
701 where MODE is an integer mode too wide to be compared with one insn.
702 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
703 to indicate drop through. */
705 static void
706 do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
707 rtx if_false_label, rtx if_true_label)
709 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
710 rtx drop_through_label = 0;
711 int i;
713 if (op1 == const0_rtx)
715 do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label);
716 return;
718 else if (op0 == const0_rtx)
720 do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label);
721 return;
724 if (! if_false_label)
725 drop_through_label = if_false_label = gen_label_rtx ();
727 for (i = 0; i < nwords; i++)
728 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
729 operand_subword_force (op1, i, mode),
730 EQ, 0, word_mode, NULL_RTX,
731 if_false_label, NULL_RTX);
733 if (if_true_label)
734 emit_jump (if_true_label);
735 if (drop_through_label)
736 emit_label (drop_through_label);
739 /* Given an EQ_EXPR expression EXP for values too wide to be compared
740 with one insn, test the comparison and jump to the appropriate label. */
742 static void
743 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
745 rtx op0 = expand_normal (TREE_OPERAND (exp, 0));
746 rtx op1 = expand_normal (TREE_OPERAND (exp, 1));
747 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
748 do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
749 if_true_label);
752 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
753 MODE is the machine mode of the comparison, not of the result.
754 (including code to compute the values to be compared) and set CC0
755 according to the result. The decision as to signed or unsigned
756 comparison must be made by the caller.
758 We force a stack adjustment unless there are currently
759 things pushed on the stack that aren't yet used.
761 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
762 compared. */
765 compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
766 enum machine_mode mode, rtx size)
768 rtx tem;
770 /* If one operand is constant, make it the second one. Only do this
771 if the other operand is not constant as well. */
773 if (swap_commutative_operands_p (op0, op1))
775 tem = op0;
776 op0 = op1;
777 op1 = tem;
778 code = swap_condition (code);
781 do_pending_stack_adjust ();
783 code = unsignedp ? unsigned_condition (code) : code;
784 tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
785 if (tem)
787 if (CONSTANT_P (tem))
788 return tem;
790 if (COMPARISON_P (tem))
792 code = GET_CODE (tem);
793 op0 = XEXP (tem, 0);
794 op1 = XEXP (tem, 1);
795 mode = GET_MODE (op0);
796 unsignedp = (code == GTU || code == LTU
797 || code == GEU || code == LEU);
801 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
803 #if HAVE_cc0
804 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
805 #else
806 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
807 #endif
810 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
811 The decision as to signed or unsigned comparison must be made by the caller.
813 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
814 compared. */
816 void
817 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
818 enum machine_mode mode, rtx size, rtx if_false_label,
819 rtx if_true_label)
821 rtx tem;
822 int dummy_true_label = 0;
824 /* Reverse the comparison if that is safe and we want to jump if it is
825 false. */
826 if (! if_true_label && ! FLOAT_MODE_P (mode))
828 if_true_label = if_false_label;
829 if_false_label = 0;
830 code = reverse_condition (code);
833 /* If one operand is constant, make it the second one. Only do this
834 if the other operand is not constant as well. */
836 if (swap_commutative_operands_p (op0, op1))
838 tem = op0;
839 op0 = op1;
840 op1 = tem;
841 code = swap_condition (code);
844 do_pending_stack_adjust ();
846 code = unsignedp ? unsigned_condition (code) : code;
847 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
848 op0, op1)))
850 if (CONSTANT_P (tem))
852 rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
853 ? if_false_label : if_true_label;
854 if (label)
855 emit_jump (label);
856 return;
859 code = GET_CODE (tem);
860 mode = GET_MODE (tem);
861 op0 = XEXP (tem, 0);
862 op1 = XEXP (tem, 1);
863 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
867 if (! if_true_label)
869 dummy_true_label = 1;
870 if_true_label = gen_label_rtx ();
873 if (GET_MODE_CLASS (mode) == MODE_INT
874 && ! can_compare_p (code, mode, ccp_jump))
876 switch (code)
878 case LTU:
879 do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
880 if_false_label, if_true_label);
881 break;
883 case LEU:
884 do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
885 if_true_label, if_false_label);
886 break;
888 case GTU:
889 do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
890 if_false_label, if_true_label);
891 break;
893 case GEU:
894 do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
895 if_true_label, if_false_label);
896 break;
898 case LT:
899 do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
900 if_false_label, if_true_label);
901 break;
903 case LE:
904 do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
905 if_true_label, if_false_label);
906 break;
908 case GT:
909 do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
910 if_false_label, if_true_label);
911 break;
913 case GE:
914 do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
915 if_true_label, if_false_label);
916 break;
918 case EQ:
919 do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
920 if_true_label);
921 break;
923 case NE:
924 do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
925 if_false_label);
926 break;
928 default:
929 gcc_unreachable ();
932 else
933 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
934 if_true_label);
936 if (if_false_label)
937 emit_jump (if_false_label);
938 if (dummy_true_label)
939 emit_label (if_true_label);
942 /* Generate code for a comparison expression EXP (including code to compute
943 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
944 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
945 generated code will drop through.
946 SIGNED_CODE should be the rtx operation for this comparison for
947 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
949 We force a stack adjustment unless there are currently
950 things pushed on the stack that aren't yet used. */
952 static void
953 do_compare_and_jump (tree exp, enum rtx_code signed_code,
954 enum rtx_code unsigned_code, rtx if_false_label,
955 rtx if_true_label)
957 rtx op0, op1;
958 tree type;
959 enum machine_mode mode;
960 int unsignedp;
961 enum rtx_code code;
963 /* Don't crash if the comparison was erroneous. */
964 op0 = expand_normal (TREE_OPERAND (exp, 0));
965 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
966 return;
968 op1 = expand_normal (TREE_OPERAND (exp, 1));
969 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
970 return;
972 type = TREE_TYPE (TREE_OPERAND (exp, 0));
973 mode = TYPE_MODE (type);
974 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
975 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
976 || (GET_MODE_BITSIZE (mode)
977 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
978 1)))))))
980 /* op0 might have been replaced by promoted constant, in which
981 case the type of second argument should be used. */
982 type = TREE_TYPE (TREE_OPERAND (exp, 1));
983 mode = TYPE_MODE (type);
985 unsignedp = TYPE_UNSIGNED (type);
986 code = unsignedp ? unsigned_code : signed_code;
988 #ifdef HAVE_canonicalize_funcptr_for_compare
989 /* If function pointers need to be "canonicalized" before they can
990 be reliably compared, then canonicalize them.
991 Only do this if *both* sides of the comparison are function pointers.
992 If one side isn't, we want a noncanonicalized comparison. See PR
993 middle-end/17564. */
994 if (HAVE_canonicalize_funcptr_for_compare
995 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
996 && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
997 == FUNCTION_TYPE
998 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
999 && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
1000 == FUNCTION_TYPE)
1002 rtx new_op0 = gen_reg_rtx (mode);
1003 rtx new_op1 = gen_reg_rtx (mode);
1005 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
1006 op0 = new_op0;
1008 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
1009 op1 = new_op1;
1011 #endif
1013 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
1014 ((mode == BLKmode)
1015 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
1016 if_false_label, if_true_label);
1019 #include "gt-dojump.h"