PR c++/15745
gcc/dojump.c
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */

void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from the function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  return (rtx_cost (and_test, IF_THEN_ELSE)
          <= rtx_cost (shift_test, IF_THEN_ELSE));
}

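/* Illustrative note (not in the original sources): for instance,
   prefer_and_bit_test (SImode, 3) asks whether
     (and:SI (reg:SI) (const_int 8))
   is no more expensive, according to rtx_cost in an IF_THEN_ELSE
   context, than
     (and:SI (ashiftrt:SI (reg:SI) (const_int 3)) (const_int 1)).
   do_jump uses the answer below to decide whether to undo
   fold_single_bit_test's canonicalization for a jump test.  */
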
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
              != CODE_FOR_nothing))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
                != CODE_FOR_nothing))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();

            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
    }
    break;

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

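/* Illustrative note (not in the original sources): in the default case
   above, a condition with no special handling (for example a bare call
   such as "foo ()" used as a truth value) is expanded to a pseudo with
   expand_normal and then branched on with an NE-against-zero test via
   do_compare_rtx_and_jump.  */
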
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

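/* Illustrative note (not in the original sources): with 32-bit words, a
   DImode ">" jump that the target cannot do in one insn is emitted as a
   pair of word_mode tests per word, high-order word first: branch to the
   true label if the high words compare GT, branch to the false label if
   they compare NE, and only when they are equal run the same two tests on
   the low words (as unsigned); if every word is equal the false label is
   finally taken.  */
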
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  rtx op0 = expand_normal (TREE_OPERAND (exp, swap));
  rtx op1 = expand_normal (TREE_OPERAND (exp, !swap));
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}

/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

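/* Illustrative note (not in the original sources): for example, an
   "x == 0" test on a DImode value with 32-bit words is normally expanded
   by IORing the two word_mode halves into one register and emitting a
   single EQ-against-zero branch on the result; the word-by-word loop
   above is only a fallback when that IOR cannot be expanded.  */
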
/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_normal (TREE_OPERAND (exp, 0));
  rtx op1 = expand_normal (TREE_OPERAND (exp, 1));
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label);
}

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set CC0
   according to the result.  MODE is the machine mode of the comparison,
   not of the result.  The decision as to signed or unsigned comparison
   must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      if (CONSTANT_P (tem))
        return tem;

      if (COMPARISON_P (tem))
        {
          code = GET_CODE (tem);
          op0 = XEXP (tem, 0);
          op1 = XEXP (tem, 1);
          mode = GET_MODE (op0);
          unsignedp = (code == GTU || code == LTU
                       || code == GEU || code == LEU);
        }
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}

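/* Illustrative note (not in the original sources): on a cc0 target the
   comparison rtx returned above refers to cc0 and is therefore only
   meaningful immediately after the compare insn just emitted; on other
   targets the operands themselves are embedded in the returned rtx.  */
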
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                             if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

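/* Illustrative note (not in the original sources): when only
   IF_FALSE_LABEL is supplied and the mode is not floating point, the code
   above reverses the condition (e.g. an EQ jump to the false label becomes
   an NE jump to that same label) so that a single conditional branch
   suffices; the reversal is skipped for floating-point modes, where
   unordered operands make it unsafe.  */
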
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (TREE_OPERAND (exp, 0));
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_normal (TREE_OPERAND (exp, 1));
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
           == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

#include "gt-dojump.h"