/* Extraction note: this file is gcc/dojump.c
   (blob 27a3cd68cee05cadc298301637f641917e705d3b), captured from a
   git web view of the official-gcc alias-decl branch.  */
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
33 #include "expr.h"
34 #include "optabs.h"
35 #include "langhooks.h"
36 #include "ggc.h"
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
42 rtx);
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
47 void
48 init_pending_stack_adjust (void)
50 pending_stack_adjust = 0;
53 /* Discard any pending stack adjustment. This avoid relying on the
54 RTL optimizers to remove useless adjustments when we know the
55 stack pointer value is dead. */
56 void discard_pending_stack_adjust (void)
58 stack_pointer_delta -= pending_stack_adjust;
59 pending_stack_adjust = 0;
62 /* When exiting from function, if safe, clear out any pending stack adjust
63 so the adjustment won't get done.
65 Note, if the current function calls alloca, then it must have a
66 frame pointer regardless of the value of flag_omit_frame_pointer. */
68 void
69 clear_pending_stack_adjust (void)
71 if (optimize > 0
72 && (! flag_omit_frame_pointer || current_function_calls_alloca)
73 && EXIT_IGNORE_STACK
74 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
75 discard_pending_stack_adjust ();
78 /* Pop any previously-pushed arguments that have not been popped yet. */
80 void
81 do_pending_stack_adjust (void)
83 if (inhibit_defer_pop == 0)
85 if (pending_stack_adjust != 0)
86 adjust_stack (GEN_INT (pending_stack_adjust));
87 pending_stack_adjust = 0;
91 /* Expand conditional expressions. */
93 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
94 LABEL is an rtx of code CODE_LABEL, in this function and all the
95 functions here. */
97 void
98 jumpifnot (tree exp, rtx label)
100 do_jump (exp, label, NULL_RTX);
103 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
105 void
106 jumpif (tree exp, rtx label)
108 do_jump (exp, NULL_RTX, label);
111 /* Used internally by prefer_and_bit_test. */
113 static GTY(()) rtx and_reg;
114 static GTY(()) rtx and_test;
115 static GTY(()) rtx shift_test;
117 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
118 where X is an arbitrary register of mode MODE. Return true if the former
119 is preferred. */
121 static bool
122 prefer_and_bit_test (enum machine_mode mode, int bitnum)
124 if (and_test == 0)
126 /* Set up rtxes for the two variations. Use NULL as a placeholder
127 for the BITNUM-based constants. */
128 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
129 and_test = gen_rtx_AND (mode, and_reg, NULL);
130 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
131 const1_rtx);
133 else
135 /* Change the mode of the previously-created rtxes. */
136 PUT_MODE (and_reg, mode);
137 PUT_MODE (and_test, mode);
138 PUT_MODE (shift_test, mode);
139 PUT_MODE (XEXP (shift_test, 0), mode);
142 /* Fill in the integers. */
143 XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
144 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
146 return (rtx_cost (and_test, IF_THEN_ELSE)
147 <= rtx_cost (shift_test, IF_THEN_ELSE));
150 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
151 the result is zero, or IF_TRUE_LABEL if the result is one.
152 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
153 meaning fall through in that case.
155 do_jump always does any pending stack adjust except when it does not
156 actually perform a jump. An example where there is no jump
157 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. */
159 void
160 do_jump (tree exp, rtx if_false_label, rtx if_true_label)
162 enum tree_code code = TREE_CODE (exp);
163 rtx temp;
164 int i;
165 tree type;
166 enum machine_mode mode;
168 switch (code)
170 case ERROR_MARK:
171 break;
173 case INTEGER_CST:
174 temp = integer_zerop (exp) ? if_false_label : if_true_label;
175 if (temp)
176 emit_jump (temp);
177 break;
179 #if 0
180 /* This is not true with #pragma weak */
181 case ADDR_EXPR:
182 /* The address of something can never be zero. */
183 if (if_true_label)
184 emit_jump (if_true_label);
185 break;
186 #endif
188 case NOP_EXPR:
189 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
190 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
191 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
192 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
193 goto normal;
194 case CONVERT_EXPR:
195 /* If we are narrowing the operand, we have to do the compare in the
196 narrower mode. */
197 if ((TYPE_PRECISION (TREE_TYPE (exp))
198 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
199 goto normal;
200 case NON_LVALUE_EXPR:
201 case ABS_EXPR:
202 case NEGATE_EXPR:
203 case LROTATE_EXPR:
204 case RROTATE_EXPR:
205 /* These cannot change zero->nonzero or vice versa. */
206 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
207 break;
209 case MINUS_EXPR:
210 /* Nonzero iff operands of minus differ. */
211 do_compare_and_jump (build2 (NE_EXPR, TREE_TYPE (exp),
212 TREE_OPERAND (exp, 0),
213 TREE_OPERAND (exp, 1)),
214 NE, NE, if_false_label, if_true_label);
215 break;
217 case BIT_AND_EXPR:
218 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
219 See if the former is preferred for jump tests and restore it
220 if so. */
221 if (TREE_CODE (TREE_OPERAND (exp, 0)) == RSHIFT_EXPR
222 && integer_onep (TREE_OPERAND (exp, 1)))
224 tree arg = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
225 tree shift = TREE_OPERAND (TREE_OPERAND (exp, 0), 1);
226 tree one = TREE_OPERAND (exp, 1);
227 tree argtype = TREE_TYPE (arg);
228 if (TREE_CODE (shift) == INTEGER_CST
229 && compare_tree_int (shift, 0) > 0
230 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
231 && prefer_and_bit_test (TYPE_MODE (argtype),
232 TREE_INT_CST_LOW (shift)))
234 do_jump (build2 (BIT_AND_EXPR, argtype, arg,
235 fold (build2 (LSHIFT_EXPR, argtype,
236 one, shift))),
237 if_false_label, if_true_label);
238 break;
242 /* If we are AND'ing with a small constant, do this comparison in the
243 smallest type that fits. If the machine doesn't have comparisons
244 that small, it will be converted back to the wider comparison.
245 This helps if we are testing the sign bit of a narrower object.
246 combine can't do this for us because it can't know whether a
247 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
249 if (! SLOW_BYTE_ACCESS
250 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
251 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
252 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
253 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
254 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
255 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
256 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
257 != CODE_FOR_nothing))
259 do_jump (convert (type, exp), if_false_label, if_true_label);
260 break;
262 goto normal;
264 case TRUTH_NOT_EXPR:
265 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
266 break;
268 case TRUTH_ANDIF_EXPR:
269 case TRUTH_ORIF_EXPR:
270 case COMPOUND_EXPR:
271 case COND_EXPR:
272 /* Lowered by gimplify.c. */
273 gcc_unreachable ();
275 case COMPONENT_REF:
276 case BIT_FIELD_REF:
277 case ARRAY_REF:
278 case ARRAY_RANGE_REF:
280 HOST_WIDE_INT bitsize, bitpos;
281 int unsignedp;
282 enum machine_mode mode;
283 tree type;
284 tree offset;
285 int volatilep = 0;
287 /* Get description of this reference. We don't actually care
288 about the underlying object here. */
289 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
290 &unsignedp, &volatilep);
292 type = lang_hooks.types.type_for_size (bitsize, unsignedp);
293 if (! SLOW_BYTE_ACCESS
294 && type != 0 && bitsize >= 0
295 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
296 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
297 != CODE_FOR_nothing))
299 do_jump (convert (type, exp), if_false_label, if_true_label);
300 break;
302 goto normal;
305 case EQ_EXPR:
307 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
309 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
310 != MODE_COMPLEX_FLOAT);
311 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
312 != MODE_COMPLEX_INT);
314 if (integer_zerop (TREE_OPERAND (exp, 1)))
315 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
316 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
317 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
318 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
319 else
320 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
321 break;
324 case NE_EXPR:
326 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
328 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
329 != MODE_COMPLEX_FLOAT);
330 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
331 != MODE_COMPLEX_INT);
333 if (integer_zerop (TREE_OPERAND (exp, 1)))
334 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
335 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
336 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
337 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
338 else
339 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
340 break;
343 case LT_EXPR:
344 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
345 if (GET_MODE_CLASS (mode) == MODE_INT
346 && ! can_compare_p (LT, mode, ccp_jump))
347 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
348 else
349 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
350 break;
352 case LE_EXPR:
353 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
354 if (GET_MODE_CLASS (mode) == MODE_INT
355 && ! can_compare_p (LE, mode, ccp_jump))
356 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
357 else
358 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
359 break;
361 case GT_EXPR:
362 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
363 if (GET_MODE_CLASS (mode) == MODE_INT
364 && ! can_compare_p (GT, mode, ccp_jump))
365 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
366 else
367 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
368 break;
370 case GE_EXPR:
371 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
372 if (GET_MODE_CLASS (mode) == MODE_INT
373 && ! can_compare_p (GE, mode, ccp_jump))
374 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
375 else
376 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
377 break;
379 case UNORDERED_EXPR:
380 case ORDERED_EXPR:
382 enum rtx_code cmp, rcmp;
383 int do_rev;
385 if (code == UNORDERED_EXPR)
386 cmp = UNORDERED, rcmp = ORDERED;
387 else
388 cmp = ORDERED, rcmp = UNORDERED;
389 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
391 do_rev = 0;
392 if (! can_compare_p (cmp, mode, ccp_jump)
393 && (can_compare_p (rcmp, mode, ccp_jump)
394 /* If the target doesn't provide either UNORDERED or ORDERED
395 comparisons, canonicalize on UNORDERED for the library. */
396 || rcmp == UNORDERED))
397 do_rev = 1;
399 if (! do_rev)
400 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
401 else
402 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
404 break;
407 enum rtx_code rcode1;
408 enum tree_code tcode1, tcode2;
410 case UNLT_EXPR:
411 rcode1 = UNLT;
412 tcode1 = UNORDERED_EXPR;
413 tcode2 = LT_EXPR;
414 goto unordered_bcc;
415 case UNLE_EXPR:
416 rcode1 = UNLE;
417 tcode1 = UNORDERED_EXPR;
418 tcode2 = LE_EXPR;
419 goto unordered_bcc;
420 case UNGT_EXPR:
421 rcode1 = UNGT;
422 tcode1 = UNORDERED_EXPR;
423 tcode2 = GT_EXPR;
424 goto unordered_bcc;
425 case UNGE_EXPR:
426 rcode1 = UNGE;
427 tcode1 = UNORDERED_EXPR;
428 tcode2 = GE_EXPR;
429 goto unordered_bcc;
430 case UNEQ_EXPR:
431 rcode1 = UNEQ;
432 tcode1 = UNORDERED_EXPR;
433 tcode2 = EQ_EXPR;
434 goto unordered_bcc;
435 case LTGT_EXPR:
436 /* It is ok for LTGT_EXPR to trap when the result is unordered,
437 so expand to (a < b) || (a > b). */
438 rcode1 = LTGT;
439 tcode1 = LT_EXPR;
440 tcode2 = GT_EXPR;
441 goto unordered_bcc;
443 unordered_bcc:
444 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
445 if (can_compare_p (rcode1, mode, ccp_jump))
446 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
447 if_true_label);
448 else
450 tree op0 = save_expr (TREE_OPERAND (exp, 0));
451 tree op1 = save_expr (TREE_OPERAND (exp, 1));
452 tree cmp0, cmp1;
453 rtx drop_through_label = 0;
455 /* If the target doesn't support combined unordered
456 compares, decompose into two comparisons. */
457 if (if_true_label == 0)
458 drop_through_label = if_true_label = gen_label_rtx ();
460 cmp0 = fold (build2 (tcode1, TREE_TYPE (exp), op0, op1));
461 cmp1 = fold (build2 (tcode2, TREE_TYPE (exp), op0, op1));
462 do_jump (cmp0, 0, if_true_label);
463 do_jump (cmp1, if_false_label, if_true_label);
465 if (drop_through_label)
467 do_pending_stack_adjust ();
468 emit_label (drop_through_label);
472 break;
474 /* Special case:
475 __builtin_expect (<test>, 0) and
476 __builtin_expect (<test>, 1)
478 We need to do this here, so that <test> is not converted to a SCC
479 operation on machines that use condition code registers and COMPARE
480 like the PowerPC, and then the jump is done based on whether the SCC
481 operation produced a 1 or 0. */
482 case CALL_EXPR:
483 /* Check for a built-in function. */
485 tree fndecl = get_callee_fndecl (exp);
486 tree arglist = TREE_OPERAND (exp, 1);
488 if (fndecl
489 && DECL_BUILT_IN (fndecl)
490 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
491 && arglist != NULL_TREE
492 && TREE_CHAIN (arglist) != NULL_TREE)
494 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
495 if_true_label);
497 if (seq != NULL_RTX)
499 emit_insn (seq);
500 return;
504 /* Fall through and generate the normal code. */
506 default:
507 normal:
508 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
509 do_pending_stack_adjust ();
511 if (GET_CODE (temp) == CONST_INT
512 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
513 || GET_CODE (temp) == LABEL_REF)
515 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
516 if (target)
517 emit_jump (target);
519 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
520 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
521 /* Note swapping the labels gives us not-equal. */
522 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
523 else
525 gcc_assert (GET_MODE (temp) != VOIDmode);
527 /* The RTL optimizers prefer comparisons against pseudos. */
528 if (GET_CODE (temp) == SUBREG)
530 /* Compare promoted variables in their promoted mode. */
531 if (SUBREG_PROMOTED_VAR_P (temp)
532 && REG_P (XEXP (temp, 0)))
533 temp = XEXP (temp, 0);
534 else
535 temp = copy_to_reg (temp);
537 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
538 NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
539 GET_MODE (temp), NULL_RTX,
540 if_false_label, if_true_label);
545 /* Given a comparison expression EXP for values too wide to be compared
546 with one insn, test the comparison and jump to the appropriate label.
547 The code of EXP is ignored; we always test GT if SWAP is 0,
548 and LT if SWAP is 1. */
550 static void
551 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
552 rtx if_true_label)
554 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
555 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
556 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
557 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
559 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
560 if_true_label);
563 /* Compare OP0 with OP1, word at a time, in mode MODE.
564 UNSIGNEDP says to do unsigned comparison.
565 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
567 void
568 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
569 rtx op1, rtx if_false_label, rtx if_true_label)
571 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
572 rtx drop_through_label = 0;
573 int i;
575 if (! if_true_label || ! if_false_label)
576 drop_through_label = gen_label_rtx ();
577 if (! if_true_label)
578 if_true_label = drop_through_label;
579 if (! if_false_label)
580 if_false_label = drop_through_label;
582 /* Compare a word at a time, high order first. */
583 for (i = 0; i < nwords; i++)
585 rtx op0_word, op1_word;
587 if (WORDS_BIG_ENDIAN)
589 op0_word = operand_subword_force (op0, i, mode);
590 op1_word = operand_subword_force (op1, i, mode);
592 else
594 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
595 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
598 /* All but high-order word must be compared as unsigned. */
599 do_compare_rtx_and_jump (op0_word, op1_word, GT,
600 (unsignedp || i > 0), word_mode, NULL_RTX,
601 NULL_RTX, if_true_label);
603 /* Consider lower words only if these are equal. */
604 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
605 NULL_RTX, NULL_RTX, if_false_label);
608 if (if_false_label)
609 emit_jump (if_false_label);
610 if (drop_through_label)
611 emit_label (drop_through_label);
614 /* Given an EQ_EXPR expression EXP for values too wide to be compared
615 with one insn, test the comparison and jump to the appropriate label. */
617 static void
618 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
620 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
621 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
622 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
623 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
624 int i;
625 rtx drop_through_label = 0;
627 if (! if_false_label)
628 drop_through_label = if_false_label = gen_label_rtx ();
630 for (i = 0; i < nwords; i++)
631 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
632 operand_subword_force (op1, i, mode),
633 EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
634 word_mode, NULL_RTX, if_false_label, NULL_RTX);
636 if (if_true_label)
637 emit_jump (if_true_label);
638 if (drop_through_label)
639 emit_label (drop_through_label);
642 /* Jump according to whether OP0 is 0.
643 We assume that OP0 has an integer mode that is too wide
644 for the available compare insns. */
646 void
647 do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
649 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
650 rtx part;
651 int i;
652 rtx drop_through_label = 0;
654 /* The fastest way of doing this comparison on almost any machine is to
655 "or" all the words and compare the result. If all have to be loaded
656 from memory and this is a very wide item, it's possible this may
657 be slower, but that's highly unlikely. */
659 part = gen_reg_rtx (word_mode);
660 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
661 for (i = 1; i < nwords && part != 0; i++)
662 part = expand_binop (word_mode, ior_optab, part,
663 operand_subword_force (op0, i, GET_MODE (op0)),
664 part, 1, OPTAB_WIDEN);
666 if (part != 0)
668 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
669 NULL_RTX, if_false_label, if_true_label);
671 return;
674 /* If we couldn't do the "or" simply, do this with a series of compares. */
675 if (! if_false_label)
676 drop_through_label = if_false_label = gen_label_rtx ();
678 for (i = 0; i < nwords; i++)
679 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
680 const0_rtx, EQ, 1, word_mode, NULL_RTX,
681 if_false_label, NULL_RTX);
683 if (if_true_label)
684 emit_jump (if_true_label);
686 if (drop_through_label)
687 emit_label (drop_through_label);
690 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
691 MODE is the machine mode of the comparison, not of the result.
692 (including code to compute the values to be compared) and set CC0
693 according to the result. The decision as to signed or unsigned
694 comparison must be made by the caller.
696 We force a stack adjustment unless there are currently
697 things pushed on the stack that aren't yet used.
699 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
700 compared. */
703 compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
704 enum machine_mode mode, rtx size)
706 rtx tem;
708 /* If one operand is constant, make it the second one. Only do this
709 if the other operand is not constant as well. */
711 if (swap_commutative_operands_p (op0, op1))
713 tem = op0;
714 op0 = op1;
715 op1 = tem;
716 code = swap_condition (code);
719 if (flag_force_mem)
721 op0 = force_not_mem (op0);
722 op1 = force_not_mem (op1);
725 do_pending_stack_adjust ();
727 code = unsignedp ? unsigned_condition (code) : code;
728 tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
729 if (tem)
731 if (CONSTANT_P (tem))
732 return tem;
734 if (COMPARISON_P (tem))
736 code = GET_CODE (tem);
737 op0 = XEXP (tem, 0);
738 op1 = XEXP (tem, 1);
739 mode = GET_MODE (op0);
740 unsignedp = (code == GTU || code == LTU
741 || code == GEU || code == LEU);
745 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
747 #if HAVE_cc0
748 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
749 #else
750 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
751 #endif
754 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
755 The decision as to signed or unsigned comparison must be made by the caller.
757 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
758 compared. */
760 void
761 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
762 enum machine_mode mode, rtx size, rtx if_false_label,
763 rtx if_true_label)
765 rtx tem;
766 int dummy_true_label = 0;
768 /* Reverse the comparison if that is safe and we want to jump if it is
769 false. */
770 if (! if_true_label && ! FLOAT_MODE_P (mode))
772 if_true_label = if_false_label;
773 if_false_label = 0;
774 code = reverse_condition (code);
777 /* If one operand is constant, make it the second one. Only do this
778 if the other operand is not constant as well. */
780 if (swap_commutative_operands_p (op0, op1))
782 tem = op0;
783 op0 = op1;
784 op1 = tem;
785 code = swap_condition (code);
788 if (flag_force_mem)
790 op0 = force_not_mem (op0);
791 op1 = force_not_mem (op1);
794 do_pending_stack_adjust ();
796 code = unsignedp ? unsigned_condition (code) : code;
797 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
798 op0, op1)))
800 if (CONSTANT_P (tem))
802 rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
803 ? if_false_label : if_true_label;
804 if (label)
805 emit_jump (label);
806 return;
809 code = GET_CODE (tem);
810 mode = GET_MODE (tem);
811 op0 = XEXP (tem, 0);
812 op1 = XEXP (tem, 1);
813 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
816 if (! if_true_label)
818 dummy_true_label = 1;
819 if_true_label = gen_label_rtx ();
822 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
823 if_true_label);
825 if (if_false_label)
826 emit_jump (if_false_label);
827 if (dummy_true_label)
828 emit_label (if_true_label);
831 /* Generate code for a comparison expression EXP (including code to compute
832 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
833 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
834 generated code will drop through.
835 SIGNED_CODE should be the rtx operation for this comparison for
836 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
838 We force a stack adjustment unless there are currently
839 things pushed on the stack that aren't yet used. */
841 static void
842 do_compare_and_jump (tree exp, enum rtx_code signed_code,
843 enum rtx_code unsigned_code, rtx if_false_label,
844 rtx if_true_label)
846 rtx op0, op1;
847 tree type;
848 enum machine_mode mode;
849 int unsignedp;
850 enum rtx_code code;
852 /* Don't crash if the comparison was erroneous. */
853 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
854 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
855 return;
857 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
858 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
859 return;
861 type = TREE_TYPE (TREE_OPERAND (exp, 0));
862 mode = TYPE_MODE (type);
863 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
864 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
865 || (GET_MODE_BITSIZE (mode)
866 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
867 1)))))))
869 /* op0 might have been replaced by promoted constant, in which
870 case the type of second argument should be used. */
871 type = TREE_TYPE (TREE_OPERAND (exp, 1));
872 mode = TYPE_MODE (type);
874 unsignedp = TYPE_UNSIGNED (type);
875 code = unsignedp ? unsigned_code : signed_code;
877 #ifdef HAVE_canonicalize_funcptr_for_compare
878 /* If function pointers need to be "canonicalized" before they can
879 be reliably compared, then canonicalize them. */
880 if (HAVE_canonicalize_funcptr_for_compare
881 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
882 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
883 == FUNCTION_TYPE))
885 rtx new_op0 = gen_reg_rtx (mode);
887 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
888 op0 = new_op0;
891 if (HAVE_canonicalize_funcptr_for_compare
892 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
893 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
894 == FUNCTION_TYPE))
896 rtx new_op1 = gen_reg_rtx (mode);
898 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
899 op1 = new_op1;
901 #endif
903 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
904 ((mode == BLKmode)
905 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
906 if_false_label, if_true_label);
909 #include "gt-dojump.h"