2004-10-07 Jörn Rennecke <joern.rennecke@st.com>
[official-gcc.git] / gcc / dojump.c
blob50fc093f72f61a8197e95a369d90e3183bdf163e
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
33 #include "expr.h"
34 #include "optabs.h"
35 #include "langhooks.h"
36 #include "ggc.h"
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
42 rtx);
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
47 void
48 init_pending_stack_adjust (void)
/* Reset the record of pushed-but-not-yet-popped argument bytes; run once
   when RTL generation for a function begins.  */
50 pending_stack_adjust = 0;
53 /* When exiting from function, if safe, clear out any pending stack adjust
54 so the adjustment won't get done.
56 Note, if the current function calls alloca, then it must have a
57 frame pointer regardless of the value of flag_omit_frame_pointer. */
59 void
60 clear_pending_stack_adjust (void)
/* Discarding the adjustment is only safe when optimizing, when the frame
   pointer will not be omitted (alloca forces one regardless), when the
   target's epilogue ignores the stack pointer (EXIT_IGNORE_STACK), and
   when this function cannot end up inlined into a caller that would see
   the skewed stack.  */
62 if (optimize > 0
63 && (! flag_omit_frame_pointer || current_function_calls_alloca)
64 && EXIT_IGNORE_STACK
65 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
66 && ! flag_inline_functions)
/* Keep stack_pointer_delta consistent with the adjustment we are
   abandoning (comma expression updates both together).  */
68 stack_pointer_delta -= pending_stack_adjust,
69 pending_stack_adjust = 0;
73 /* Pop any previously-pushed arguments that have not been popped yet. */
75 void
76 do_pending_stack_adjust (void)
/* Do nothing while deferral of pops is in effect (inhibit_defer_pop
   nonzero); the adjustment stays pending for a later call.  */
78 if (inhibit_defer_pop == 0)
80 if (pending_stack_adjust != 0)
81 adjust_stack (GEN_INT (pending_stack_adjust));
/* Cleared even when zero, so the pending state is always consumed here.  */
82 pending_stack_adjust = 0;
86 /* Expand conditional expressions. */
88 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
89 LABEL is an rtx of code CODE_LABEL, in this function and all the
90 functions here. */
92 void
93 jumpifnot (tree exp, rtx label)
/* Branch to LABEL when EXP is zero: pass LABEL as the false-label and no
   true-label, so a nonzero value falls through.  */
95 do_jump (exp, label, NULL_RTX);
98 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
100 void
101 jumpif (tree exp, rtx label)
/* Branch to LABEL when EXP is nonzero: pass LABEL as the true-label and no
   false-label, so a zero value falls through.  */
103 do_jump (exp, NULL_RTX, label);
106 /* Used internally by prefer_and_bit_test. */
108 static GTY(()) rtx and_reg;
109 static GTY(()) rtx and_test;
110 static GTY(()) rtx shift_test;
112 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
113 where X is an arbitrary register of mode MODE. Return true if the former
114 is preferred. */
116 static bool
117 prefer_and_bit_test (enum machine_mode mode, int bitnum)
/* The three GTY'd rtxes (and_reg, and_test, shift_test) act as a lazily
   built, reusable template so we do not allocate fresh RTL on every cost
   query.  */
119 if (and_test == 0)
121 /* Set up rtxes for the two variations. Use NULL as a placeholder
122 for the BITNUM-based constants. */
123 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
124 and_test = gen_rtx_AND (mode, and_reg, NULL);
125 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
126 const1_rtx);
128 else
130 /* Change the mode of the previously-created rtxes. */
131 PUT_MODE (and_reg, mode);
132 PUT_MODE (and_test, mode);
133 PUT_MODE (shift_test, mode);
134 PUT_MODE (XEXP (shift_test, 0), mode);
137 /* Fill in the integers. */
138 XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
139 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
/* Prefer the AND form on a cost tie (<=); costs are taken in the context
   of an IF_THEN_ELSE, matching the jump use these tests feed.  */
141 return (rtx_cost (and_test, IF_THEN_ELSE)
142 <= rtx_cost (shift_test, IF_THEN_ELSE))
145 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
146 the result is zero, or IF_TRUE_LABEL if the result is one.
147 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
148 meaning fall through in that case.
150 do_jump always does any pending stack adjust except when it does not
151 actually perform a jump. An example where there is no jump
152 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. */
154 void
155 do_jump (tree exp, rtx if_false_label, rtx if_true_label)
/* Central dispatch: examine the tree code of EXP and pick the cheapest
   jump expansion.  Either label may be NULL_RTX, meaning "fall through"
   for that outcome.  */
157 enum tree_code code = TREE_CODE (exp);
158 rtx temp;
159 int i;
160 tree type;
161 enum machine_mode mode;
163 switch (code)
165 case ERROR_MARK:
/* Erroneous trees expand to nothing.  */
166 break;
168 case INTEGER_CST:
/* A constant condition needs no comparison: jump unconditionally to
   whichever label matches its truth value, if that label exists.  */
169 temp = integer_zerop (exp) ? if_false_label : if_true_label;
170 if (temp)
171 emit_jump (temp);
172 break;
174 #if 0
175 /* This is not true with #pragma weak */
176 case ADDR_EXPR:
177 /* The address of something can never be zero. */
178 if (if_true_label)
179 emit_jump (if_true_label);
180 break;
181 #endif
183 case NOP_EXPR:
/* A NOP around a memory reference must be expanded normally so the
   COMPONENT_REF/ARRAY_REF case below can narrow the access; otherwise
   fall through to the CONVERT_EXPR handling.  */
184 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
185 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
186 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
187 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
188 goto normal;
189 case CONVERT_EXPR:
190 /* If we are narrowing the operand, we have to do the compare in the
191 narrower mode. */
192 if ((TYPE_PRECISION (TREE_TYPE (exp))
193 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
194 goto normal;
195 case NON_LVALUE_EXPR:
196 case ABS_EXPR:
197 case NEGATE_EXPR:
198 case LROTATE_EXPR:
199 case RROTATE_EXPR:
200 /* These cannot change zero->nonzero or vice versa. */
201 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
202 break;
204 case MINUS_EXPR:
205 /* Nonzero iff operands of minus differ. */
206 do_compare_and_jump (build2 (NE_EXPR, TREE_TYPE (exp),
207 TREE_OPERAND (exp, 0),
208 TREE_OPERAND (exp, 1)),
209 NE, NE, if_false_label, if_true_label);
210 break;
212 case BIT_AND_EXPR:
213 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
214 See if the former is preferred for jump tests and restore it
215 if so. */
216 if (TREE_CODE (TREE_OPERAND (exp, 0)) == RSHIFT_EXPR
217 && integer_onep (TREE_OPERAND (exp, 1)))
219 tree arg = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
220 tree shift = TREE_OPERAND (TREE_OPERAND (exp, 0), 1);
221 tree one = TREE_OPERAND (exp, 1);
222 tree argtype = TREE_TYPE (arg);
/* Only rebuild the AND form for an in-range constant shift whose
   target cost (prefer_and_bit_test) says the AND is no worse.  */
223 if (TREE_CODE (shift) == INTEGER_CST
224 && compare_tree_int (shift, 0) > 0
225 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
226 && prefer_and_bit_test (TYPE_MODE (argtype),
227 TREE_INT_CST_LOW (shift)))
229 do_jump (build2 (BIT_AND_EXPR, argtype, arg,
230 fold (build2 (LSHIFT_EXPR, argtype,
231 one, shift))),
232 if_false_label, if_true_label);
233 break;
237 /* If we are AND'ing with a small constant, do this comparison in the
238 smallest type that fits. If the machine doesn't have comparisons
239 that small, it will be converted back to the wider comparison.
240 This helps if we are testing the sign bit of a narrower object.
241 combine can't do this for us because it can't know whether a
242 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
244 if (! SLOW_BYTE_ACCESS
245 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
246 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
247 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
248 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
249 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
250 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
251 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
252 != CODE_FOR_nothing))
254 do_jump (convert (type, exp), if_false_label, if_true_label);
255 break;
257 goto normal;
259 case TRUTH_NOT_EXPR:
/* Logical negation: just swap the two labels.  */
260 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
261 break;
263 case TRUTH_ANDIF_EXPR:
264 case TRUTH_ORIF_EXPR:
265 case COMPOUND_EXPR:
266 case COND_EXPR:
267 /* Lowered by gimplify.c. */
268 gcc_unreachable ();
270 case COMPONENT_REF:
271 case BIT_FIELD_REF:
272 case ARRAY_REF:
273 case ARRAY_RANGE_REF:
275 HOST_WIDE_INT bitsize, bitpos;
276 int unsignedp;
277 enum machine_mode mode;
278 tree type;
279 tree offset;
280 int volatilep = 0;
282 /* Get description of this reference. We don't actually care
283 about the underlying object here. */
284 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
285 &unsignedp, &volatilep);
/* If the referenced field fits a narrower type for which the target
   has a compare, test in that type; otherwise expand normally.  */
287 type = lang_hooks.types.type_for_size (bitsize, unsignedp);
288 if (! SLOW_BYTE_ACCESS
289 && type != 0 && bitsize >= 0
290 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
291 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
292 != CODE_FOR_nothing))
294 do_jump (convert (type, exp), if_false_label, if_true_label);
295 break;
297 goto normal;
300 case EQ_EXPR:
302 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
/* Complex comparisons should have been lowered before reaching here.  */
304 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
305 != MODE_COMPLEX_FLOAT);
306 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
307 != MODE_COMPLEX_INT);
/* x == 0 jumps like !x: recurse with the labels swapped.  */
309 if (integer_zerop (TREE_OPERAND (exp, 1)))
310 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
311 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
312 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
313 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
314 else
315 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
316 break;
319 case NE_EXPR:
321 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
323 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
324 != MODE_COMPLEX_FLOAT);
325 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
326 != MODE_COMPLEX_INT);
/* x != 0 jumps like plain x; note the by-parts case swaps the labels
   because the helper tests equality.  */
328 if (integer_zerop (TREE_OPERAND (exp, 1)))
329 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
330 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
331 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
332 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
333 else
334 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
335 break;
338 case LT_EXPR:
/* For the four orderings: if the mode has no direct compare, fall back
   to word-at-a-time comparison, expressing each as GT (SWAP=0) or LT
   (SWAP=1) with labels swapped as needed; otherwise emit the signed/
   unsigned compare pair directly.  */
339 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
340 if (GET_MODE_CLASS (mode) == MODE_INT
341 && ! can_compare_p (LT, mode, ccp_jump))
342 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
343 else
344 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
345 break;
347 case LE_EXPR:
348 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
349 if (GET_MODE_CLASS (mode) == MODE_INT
350 && ! can_compare_p (LE, mode, ccp_jump))
351 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
352 else
353 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
354 break;
356 case GT_EXPR:
357 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
358 if (GET_MODE_CLASS (mode) == MODE_INT
359 && ! can_compare_p (GT, mode, ccp_jump))
360 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
361 else
362 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
363 break;
365 case GE_EXPR:
366 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
367 if (GET_MODE_CLASS (mode) == MODE_INT
368 && ! can_compare_p (GE, mode, ccp_jump))
369 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
370 else
371 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
372 break;
374 case UNORDERED_EXPR:
375 case ORDERED_EXPR:
377 enum rtx_code cmp, rcmp;
378 int do_rev;
380 if (code == UNORDERED_EXPR)
381 cmp = UNORDERED, rcmp = ORDERED;
382 else
383 cmp = ORDERED, rcmp = UNORDERED;
384 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* Use the reversed comparison (with labels swapped) when the target
   only supports the opposite test.  */
386 do_rev = 0;
387 if (! can_compare_p (cmp, mode, ccp_jump)
388 && (can_compare_p (rcmp, mode, ccp_jump)
389 /* If the target doesn't provide either UNORDERED or ORDERED
390 comparisons, canonicalize on UNORDERED for the library. */
391 || rcmp == UNORDERED))
392 do_rev = 1;
394 if (! do_rev)
395 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
396 else
397 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
399 break;
402 enum rtx_code rcode1;
403 enum tree_code tcode1, tcode2;
/* Unordered relationals: each sets RCODE1 (the combined RTL code) and
   TCODE1/TCODE2 (a two-test decomposition) then shares the code at
   unordered_bcc.  */
405 case UNLT_EXPR:
406 rcode1 = UNLT;
407 tcode1 = UNORDERED_EXPR;
408 tcode2 = LT_EXPR;
409 goto unordered_bcc;
410 case UNLE_EXPR:
411 rcode1 = UNLE;
412 tcode1 = UNORDERED_EXPR;
413 tcode2 = LE_EXPR;
414 goto unordered_bcc;
415 case UNGT_EXPR:
416 rcode1 = UNGT;
417 tcode1 = UNORDERED_EXPR;
418 tcode2 = GT_EXPR;
419 goto unordered_bcc;
420 case UNGE_EXPR:
421 rcode1 = UNGE;
422 tcode1 = UNORDERED_EXPR;
423 tcode2 = GE_EXPR;
424 goto unordered_bcc;
425 case UNEQ_EXPR:
426 rcode1 = UNEQ;
427 tcode1 = UNORDERED_EXPR;
428 tcode2 = EQ_EXPR;
429 goto unordered_bcc;
430 case LTGT_EXPR:
431 /* It is ok for LTGT_EXPR to trap when the result is unordered,
432 so expand to (a < b) || (a > b). */
433 rcode1 = LTGT;
434 tcode1 = LT_EXPR;
435 tcode2 = GT_EXPR;
436 goto unordered_bcc;
438 unordered_bcc:
439 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
440 if (can_compare_p (rcode1, mode, ccp_jump))
441 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
442 if_true_label);
443 else
445 tree op0 = save_expr (TREE_OPERAND (exp, 0));
446 tree op1 = save_expr (TREE_OPERAND (exp, 1));
447 tree cmp0, cmp1;
448 rtx drop_through_label = 0;
450 /* If the target doesn't support combined unordered
451 compares, decompose into two comparisons. */
452 if (if_true_label == 0)
453 drop_through_label = if_true_label = gen_label_rtx ();
/* save_expr above guarantees the operands are evaluated once even
   though they appear in both decomposed comparisons.  */
455 cmp0 = fold (build2 (tcode1, TREE_TYPE (exp), op0, op1));
456 cmp1 = fold (build2 (tcode2, TREE_TYPE (exp), op0, op1));
457 do_jump (cmp0, 0, if_true_label);
458 do_jump (cmp1, if_false_label, if_true_label);
460 if (drop_through_label)
462 do_pending_stack_adjust ();
463 emit_label (drop_through_label);
467 break;
469 /* Special case:
470 __builtin_expect (<test>, 0) and
471 __builtin_expect (<test>, 1)
473 We need to do this here, so that <test> is not converted to a SCC
474 operation on machines that use condition code registers and COMPARE
475 like the PowerPC, and then the jump is done based on whether the SCC
476 operation produced a 1 or 0. */
477 case CALL_EXPR:
478 /* Check for a built-in function. */
480 tree fndecl = get_callee_fndecl (exp);
481 tree arglist = TREE_OPERAND (exp, 1);
483 if (fndecl
484 && DECL_BUILT_IN (fndecl)
485 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
486 && arglist != NULL_TREE
487 && TREE_CHAIN (arglist) != NULL_TREE)
489 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
490 if_true_label);
/* A NULL SEQ means the expansion declined; fall through to the
   generic code below.  */
492 if (seq != NULL_RTX)
494 emit_insn (seq);
495 return;
499 /* Fall through and generate the normal code. */
501 default:
502 normal:
/* Generic path: evaluate EXP and compare the result against zero.  */
503 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
504 do_pending_stack_adjust ();
/* The expansion may have produced a constant after all; jump directly.  */
506 if (GET_CODE (temp) == CONST_INT
507 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
508 || GET_CODE (temp) == LABEL_REF)
510 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
511 if (target)
512 emit_jump (target);
514 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
515 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
516 /* Note swapping the labels gives us not-equal. */
517 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
518 else
520 gcc_assert (GET_MODE (temp) != VOIDmode);
522 /* The RTL optimizers prefer comparisons against pseudos. */
523 if (GET_CODE (temp) == SUBREG)
525 /* Compare promoted variables in their promoted mode. */
526 if (SUBREG_PROMOTED_VAR_P (temp)
527 && REG_P (XEXP (temp, 0)))
528 temp = XEXP (temp, 0);
529 else
530 temp = copy_to_reg (temp);
532 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
533 NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
534 GET_MODE (temp), NULL_RTX,
535 if_false_label, if_true_label);
540 /* Given a comparison expression EXP for values too wide to be compared
541 with one insn, test the comparison and jump to the appropriate label.
542 The code of EXP is ignored; we always test GT if SWAP is 0,
543 and LT if SWAP is 1. */
545 static void
546 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
547 rtx if_true_label)
/* SWAP selects which operand is expanded first: with SWAP=1 the operands
   are exchanged, turning the fixed GT test done by the rtx helper into an
   effective LT.  */
549 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
550 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
551 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
552 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
554 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
555 if_true_label);
558 /* Compare OP0 with OP1, word at a time, in mode MODE.
559 UNSIGNEDP says to do unsigned comparison.
560 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
562 void
563 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
564 rtx op1, rtx if_false_label, rtx if_true_label)
566 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
567 rtx drop_through_label = 0;
568 int i;
/* Materialize a local label for whichever outcome should fall through,
   so the per-word compares always have somewhere to branch.  */
570 if (! if_true_label || ! if_false_label)
571 drop_through_label = gen_label_rtx ();
572 if (! if_true_label)
573 if_true_label = drop_through_label;
574 if (! if_false_label)
575 if_false_label = drop_through_label;
577 /* Compare a word at a time, high order first. */
578 for (i = 0; i < nwords; i++)
580 rtx op0_word, op1_word;
/* Word 0 is the most significant on big-endian targets; otherwise walk
   the subwords from the top down.  */
582 if (WORDS_BIG_ENDIAN)
584 op0_word = operand_subword_force (op0, i, mode);
585 op1_word = operand_subword_force (op1, i, mode);
587 else
589 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
590 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
593 /* All but high-order word must be compared as unsigned. */
594 do_compare_rtx_and_jump (op0_word, op1_word, GT,
595 (unsignedp || i > 0), word_mode, NULL_RTX,
596 NULL_RTX, if_true_label)
598 /* Consider lower words only if these are equal. */
599 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
600 NULL_RTX, NULL_RTX, if_false_label);
/* All words equal => OP0 is not greater, so the false label wins.  */
603 if (if_false_label)
604 emit_jump (if_false_label);
605 if (drop_through_label)
606 emit_label (drop_through_label);
609 /* Given an EQ_EXPR expression EXP for values too wide to be compared
610 with one insn, test the comparison and jump to the appropriate label. */
612 static void
613 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
615 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
616 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
617 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
618 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
619 int i;
620 rtx drop_through_label = 0;
/* Any unequal word proves inequality, so each per-word test can branch
   straight to the false label; create one locally if the caller wants
   "unequal" to fall through.  */
622 if (! if_false_label)
623 drop_through_label = if_false_label = gen_label_rtx ();
625 for (i = 0; i < nwords; i++)
626 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
627 operand_subword_force (op1, i, mode),
628 EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
629 word_mode, NULL_RTX, if_false_label, NULL_RTX);
/* Surviving every per-word test means the operands are equal.  */
631 if (if_true_label)
632 emit_jump (if_true_label);
633 if (drop_through_label)
634 emit_label (drop_through_label);
637 /* Jump according to whether OP0 is 0.
638 We assume that OP0 has an integer mode that is too wide
639 for the available compare insns. */
641 void
642 do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
644 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
645 rtx part;
646 int i;
647 rtx drop_through_label = 0;
649 /* The fastest way of doing this comparison on almost any machine is to
650 "or" all the words and compare the result. If all have to be loaded
651 from memory and this is a very wide item, it's possible this may
652 be slower, but that's highly unlikely. */
654 part = gen_reg_rtx (word_mode);
655 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
/* expand_binop may return 0; the loop condition bails out as soon as it
   does, and the fallback below takes over.  */
656 for (i = 1; i < nwords && part != 0; i++)
657 part = expand_binop (word_mode, ior_optab, part,
658 operand_subword_force (op0, i, GET_MODE (op0)),
659 part, 1, OPTAB_WIDEN);
661 if (part != 0)
663 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
664 NULL_RTX, if_false_label, if_true_label);
666 return;
669 /* If we couldn't do the "or" simply, do this with a series of compares. */
670 if (! if_false_label)
671 drop_through_label = if_false_label = gen_label_rtx ();
/* Any nonzero word sends control to the false label ("not zero").  */
673 for (i = 0; i < nwords; i++)
674 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
675 const0_rtx, EQ, 1, word_mode, NULL_RTX,
676 if_false_label, NULL_RTX);
678 if (if_true_label)
679 emit_jump (if_true_label);
681 if (drop_through_label)
682 emit_label (drop_through_label);
685 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
686 (including code to compute the values to be compared)
687 and set (CC0) according to the result.
688 The decision as to signed or unsigned comparison must be made by the caller.
690 We force a stack adjustment unless there are currently
691 things pushed on the stack that aren't yet used.
693 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
694 compared. */
/* NOTE(review): the return-type line ("rtx", original line 696) appears to
   have been lost by the extraction that produced this text — compare with
   the other definitions here, which keep their type line.  Verify against
   the pristine source before editing.  */
697 compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
698 enum machine_mode mode, rtx size)
700 rtx tem;
702 /* If one operand is constant, make it the second one. Only do this
703 if the other operand is not constant as well. */
705 if (swap_commutative_operands_p (op0, op1))
707 tem = op0;
708 op0 = op1;
709 op1 = tem;
710 code = swap_condition (code);
/* -fforce-mem: load memory operands into registers before comparing.  */
713 if (flag_force_mem)
715 op0 = force_not_mem (op0);
716 op1 = force_not_mem (op1);
719 do_pending_stack_adjust ();
/* Try to simplify the comparison at RTL-generation time; a constant
   result is returned directly, otherwise the simplified form replaces
   the inputs.  */
721 code = unsignedp ? unsigned_condition (code) : code;
722 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
723 op0, op1)))
725 if (CONSTANT_P (tem))
726 return tem;
728 code = GET_CODE (tem);
729 mode = GET_MODE (tem);
730 op0 = XEXP (tem, 0);
731 op1 = XEXP (tem, 1);
732 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
735 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
/* On cc0 targets the comparison result lives in cc0; elsewhere return the
   comparison applied to the operands themselves.  */
737 #if HAVE_cc0
738 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
739 #else
740 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
741 #endif
744 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
745 The decision as to signed or unsigned comparison must be made by the caller.
747 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
748 compared. */
750 void
751 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
752 enum machine_mode mode, rtx size, rtx if_false_label,
753 rtx if_true_label)
755 rtx tem;
756 int dummy_true_label = 0;
758 /* Reverse the comparison if that is safe and we want to jump if it is
759 false. */
/* Not done for floating modes: reverse_condition is unsafe there because
   of NaNs.  */
760 if (! if_true_label && ! FLOAT_MODE_P (mode))
762 if_true_label = if_false_label;
763 if_false_label = 0;
764 code = reverse_condition (code);
767 /* If one operand is constant, make it the second one. Only do this
768 if the other operand is not constant as well. */
770 if (swap_commutative_operands_p (op0, op1))
772 tem = op0;
773 op0 = op1;
774 op1 = tem;
775 code = swap_condition (code);
778 if (flag_force_mem)
780 op0 = force_not_mem (op0);
781 op1 = force_not_mem (op1);
784 do_pending_stack_adjust ();
/* Fold at RTL level: a constant result becomes an unconditional jump (or
   nothing), a simplified comparison replaces the inputs.  */
786 code = unsignedp ? unsigned_condition (code) : code;
787 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
788 op0, op1)))
790 if (CONSTANT_P (tem))
792 rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
793 ? if_false_label : if_true_label;
794 if (label)
795 emit_jump (label);
796 return;
799 code = GET_CODE (tem);
800 mode = GET_MODE (tem);
801 op0 = XEXP (tem, 0);
802 op1 = XEXP (tem, 1);
803 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
/* emit_cmp_and_jump_insns needs a real target, so synthesize a label that
   is emitted immediately after the jump when the caller wanted the true
   case to fall through.  */
806 if (! if_true_label)
808 dummy_true_label = 1;
809 if_true_label = gen_label_rtx ();
812 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
813 if_true_label);
815 if (if_false_label)
816 emit_jump (if_false_label);
817 if (dummy_true_label)
818 emit_label (if_true_label);
821 /* Generate code for a comparison expression EXP (including code to compute
822 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
823 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
824 generated code will drop through.
825 SIGNED_CODE should be the rtx operation for this comparison for
826 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
828 We force a stack adjustment unless there are currently
829 things pushed on the stack that aren't yet used. */
831 static void
832 do_compare_and_jump (tree exp, enum rtx_code signed_code,
833 enum rtx_code unsigned_code, rtx if_false_label,
834 rtx if_true_label)
836 rtx op0, op1;
837 tree type;
838 enum machine_mode mode;
839 int unsignedp;
840 enum rtx_code code;
842 /* Don't crash if the comparison was erroneous. */
843 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
844 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
845 return;
847 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
848 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
849 return;
851 type = TREE_TYPE (TREE_OPERAND (exp, 0));
852 mode = TYPE_MODE (type);
853 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
854 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
855 || (GET_MODE_BITSIZE (mode)
856 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
857 1)))))))
859 /* op0 might have been replaced by promoted constant, in which
860 case the type of second argument should be used. */
861 type = TREE_TYPE (TREE_OPERAND (exp, 1));
862 mode = TYPE_MODE (type);
/* Pick the signed or unsigned flavor of the comparison from the operand
   type; the caller supplies both candidates.  */
864 unsignedp = TYPE_UNSIGNED (type);
865 code = unsignedp ? unsigned_code : signed_code;
867 #ifdef HAVE_canonicalize_funcptr_for_compare
868 /* If function pointers need to be "canonicalized" before they can
869 be reliably compared, then canonicalize them. */
870 if (HAVE_canonicalize_funcptr_for_compare
871 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
872 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
873 == FUNCTION_TYPE))
875 rtx new_op0 = gen_reg_rtx (mode);
877 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
878 op0 = new_op0;
/* Same canonicalization for the second operand, independently.  */
881 if (HAVE_canonicalize_funcptr_for_compare
882 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
883 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
884 == FUNCTION_TYPE))
886 rtx new_op1 = gen_reg_rtx (mode);
888 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
889 op1 = new_op1;
891 #endif
/* BLKmode comparisons carry an explicit size rtx; others pass NULL.  */
893 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
894 ((mode == BLKmode)
895 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
896 if_false_label, if_true_label);
899 #include "gt-dojump.h"