2005-06-30 J. D. Johnston <jjohnst@us.ibm.com>
[official-gcc.git] / gcc / dojump.c
blobc955f5d5fb39f9009951b56046847086436e770b
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
33 #include "expr.h"
34 #include "optabs.h"
35 #include "langhooks.h"
36 #include "ggc.h"
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
42 rtx);
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
void
init_pending_stack_adjust (void)
{
  /* No arguments have been pushed yet, so nothing is waiting to be
     popped.  */
  pending_stack_adjust = 0;
}
53 /* Discard any pending stack adjustment. This avoid relying on the
54 RTL optimizers to remove useless adjustments when we know the
55 stack pointer value is dead. */
void
discard_pending_stack_adjust (void)
{
  /* Undo the pending adjustment's contribution to the tracked stack
     pointer delta before forgetting it, so the delta stays accurate.  */
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}
62 /* When exiting from function, if safe, clear out any pending stack adjust
63 so the adjustment won't get done.
65 Note, if the current function calls alloca, then it must have a
66 frame pointer regardless of the value of flag_omit_frame_pointer. */
void
clear_pending_stack_adjust (void)
{
  /* Dropping the adjustment is only safe when optimizing, when the
     exit code ignores the stack pointer (EXIT_IGNORE_STACK), when a
     frame pointer will definitely exist, and when this function will
     not be inlined into a caller that might care.  */
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
    discard_pending_stack_adjust ();
}
78 /* Pop any previously-pushed arguments that have not been popped yet. */
80 void
81 do_pending_stack_adjust (void)
83 if (inhibit_defer_pop == 0)
85 if (pending_stack_adjust != 0)
86 adjust_stack (GEN_INT (pending_stack_adjust));
87 pending_stack_adjust = 0;
91 /* Expand conditional expressions. */
93 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
94 LABEL is an rtx of code CODE_LABEL, in this function and all the
95 functions here. */
void
jumpifnot (tree exp, rtx label)
{
  /* Jump-if-zero: LABEL is the false target, no true target.  */
  do_jump (exp, label, NULL_RTX);
}
103 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
void
jumpif (tree exp, rtx label)
{
  /* Jump-if-nonzero: LABEL is the true target, no false target.  */
  do_jump (exp, NULL_RTX, label);
}
111 /* Used internally by prefer_and_bit_test. */
113 static GTY(()) rtx and_reg;
114 static GTY(()) rtx and_test;
115 static GTY(()) rtx shift_test;
117 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
118 where X is an arbitrary register of mode MODE. Return true if the former
119 is preferred. */
121 static bool
122 prefer_and_bit_test (enum machine_mode mode, int bitnum)
124 if (and_test == 0)
126 /* Set up rtxes for the two variations. Use NULL as a placeholder
127 for the BITNUM-based constants. */
128 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
129 and_test = gen_rtx_AND (mode, and_reg, NULL);
130 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
131 const1_rtx);
133 else
135 /* Change the mode of the previously-created rtxes. */
136 PUT_MODE (and_reg, mode);
137 PUT_MODE (and_test, mode);
138 PUT_MODE (shift_test, mode);
139 PUT_MODE (XEXP (shift_test, 0), mode);
142 /* Fill in the integers. */
143 XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
144 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
146 return (rtx_cost (and_test, IF_THEN_ELSE)
147 <= rtx_cost (shift_test, IF_THEN_ELSE));
150 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
151 the result is zero, or IF_TRUE_LABEL if the result is one.
152 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
153 meaning fall through in that case.
155 do_jump always does any pending stack adjust except when it does not
156 actually perform a jump. An example where there is no jump
157 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. */
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      /* A constant condition: jump unconditionally to whichever label
         matches, or fall through if that label is absent.  */
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
      /* FALLTHRU */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  /* Rebuild the cheaper (X & (1 << C)) form and expand
                     that instead.  */
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      /* Logical negation: swap the two targets.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        /* X == 0 is just "jump if X is zero", i.e. do_jump with the
           labels swapped.  */
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        /* X != 0 is just "jump if X is nonzero".  */
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

      {
        enum rtx_code rcode1;
        enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();

            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
      }
      break;

      /* Special case:
         __builtin_expect (<test>, 0) and
         __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      do_pending_stack_adjust ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else
        {
          gcc_assert (GET_MODE (temp) != VOIDmode);

          /* The RTL optimizers prefer comparisons against pseudos.  */
          if (GET_CODE (temp) == SUBREG)
            {
              /* Compare promoted variables in their promoted mode.  */
              if (SUBREG_PROMOTED_VAR_P (temp)
                  && REG_P (XEXP (temp, 0)))
                temp = XEXP (temp, 0);
              else
                temp = copy_to_reg (temp);
            }
          do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                   NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                                   GET_MODE (temp), NULL_RTX,
                                   if_false_label, if_true_label);
        }
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
590 /* Given a comparison expression EXP for values too wide to be compared
591 with one insn, test the comparison and jump to the appropriate label.
592 The code of EXP is ignored; we always test GT if SWAP is 0,
593 and LT if SWAP is 1. */
595 static void
596 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
597 rtx if_true_label)
599 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
600 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
601 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
602 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
604 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
605 if_true_label);
608 /* Compare OP0 with OP1, word at a time, in mode MODE.
609 UNSIGNEDP says to do unsigned comparison.
610 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* A missing label means "fall through"; materialize it so both
     outcomes have somewhere to go.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          /* Index from the most significant word downward.  */
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  /* All words compared equal: OP0 is not greater than OP1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
659 /* Given an EQ_EXPR expression EXP for values too wide to be compared
660 with one insn, test the comparison and jump to the appropriate label. */
662 static void
663 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
665 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
666 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
667 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
668 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
669 int i;
670 rtx drop_through_label = 0;
672 if (! if_false_label)
673 drop_through_label = if_false_label = gen_label_rtx ();
675 for (i = 0; i < nwords; i++)
676 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
677 operand_subword_force (op1, i, mode),
678 EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
679 word_mode, NULL_RTX, if_false_label, NULL_RTX);
681 if (if_true_label)
682 emit_jump (if_true_label);
683 if (drop_through_label)
684 emit_label (drop_through_label);
687 /* Jump according to whether OP0 is 0.
688 We assume that OP0 has an integer mode that is too wide
689 for the available compare insns. */
void
do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  /* expand_binop may return 0 if it failed; PART nonzero means the
     OR succeeded and one compare suffices.  */
  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
735 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
736 MODE is the machine mode of the comparison, not of the result.
737 (including code to compute the values to be compared) and set CC0
738 according to the result. The decision as to signed or unsigned
739 comparison must be made by the caller.
741 We force a stack adjustment unless there are currently
742 things pushed on the stack that aren't yet used.
744 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
745 compared. */
rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      /* The comparison folded to a constant — return it directly.  */
      if (CONSTANT_P (tem))
        return tem;

      /* It simplified to another comparison; compare its operands
         instead.  */
      if (COMPARISON_P (tem))
        {
          code = GET_CODE (tem);
          op0 = XEXP (tem, 0);
          op1 = XEXP (tem, 1);
          mode = GET_MODE (op0);
          unsignedp = (code == GTU || code == LTU
                       || code == GEU || code == LEU);
        }
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
799 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
800 The decision as to signed or unsigned comparison must be made by the caller.
802 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
803 compared. */
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          /* The comparison folded to a constant: jump unconditionally
             (or fall through when the matching label is absent).  */
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      /* Simplification produced another comparison; use its pieces.  */
      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  /* emit_cmp_and_jump_insns needs a real label for the true arm; make
     a dummy one that we emit immediately afterward.  */
  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
876 /* Generate code for a comparison expression EXP (including code to compute
877 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
878 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
879 generated code will drop through.
880 SIGNED_CODE should be the rtx operation for this comparison for
881 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
883 We force a stack adjustment unless there are currently
884 things pushed on the stack that aren't yet used. */
static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* BLKmode comparisons need an explicit size operand.  */
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
953 #include "gt-dojump.h"