* arm.c (adjacent_mem_locations): Reject volatile memory refs.
[official-gcc.git] / gcc / dojump.c
blob3c9d88a548b9b8a4f3688da23749dfd7d4c2922a
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
42 rtx);
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
47 void
48 init_pending_stack_adjust (void)
50 pending_stack_adjust = 0;
53 /* Discard any pending stack adjustment. This avoid relying on the
54 RTL optimizers to remove useless adjustments when we know the
55 stack pointer value is dead. */
56 void discard_pending_stack_adjust (void)
58 stack_pointer_delta -= pending_stack_adjust;
59 pending_stack_adjust = 0;
62 /* When exiting from function, if safe, clear out any pending stack adjust
63 so the adjustment won't get done.
65 Note, if the current function calls alloca, then it must have a
66 frame pointer regardless of the value of flag_omit_frame_pointer. */
68 void
69 clear_pending_stack_adjust (void)
71 if (optimize > 0
72 && (! flag_omit_frame_pointer || current_function_calls_alloca)
73 && EXIT_IGNORE_STACK
74 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
75 discard_pending_stack_adjust ();
78 /* Pop any previously-pushed arguments that have not been popped yet. */
80 void
81 do_pending_stack_adjust (void)
83 if (inhibit_defer_pop == 0)
85 if (pending_stack_adjust != 0)
86 adjust_stack (GEN_INT (pending_stack_adjust));
87 pending_stack_adjust = 0;
91 /* Expand conditional expressions. */
93 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
94 LABEL is an rtx of code CODE_LABEL, in this function and all the
95 functions here. */
97 void
98 jumpifnot (tree exp, rtx label)
100 do_jump (exp, label, NULL_RTX);
103 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
105 void
106 jumpif (tree exp, rtx label)
108 do_jump (exp, NULL_RTX, label);
111 /* Used internally by prefer_and_bit_test. */
113 static GTY(()) rtx and_reg;
114 static GTY(()) rtx and_test;
115 static GTY(()) rtx shift_test;
117 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
118 where X is an arbitrary register of mode MODE. Return true if the former
119 is preferred. */
121 static bool
122 prefer_and_bit_test (enum machine_mode mode, int bitnum)
124 if (and_test == 0)
126 /* Set up rtxes for the two variations. Use NULL as a placeholder
127 for the BITNUM-based constants. */
128 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
129 and_test = gen_rtx_AND (mode, and_reg, NULL);
130 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
131 const1_rtx);
133 else
135 /* Change the mode of the previously-created rtxes. */
136 PUT_MODE (and_reg, mode);
137 PUT_MODE (and_test, mode);
138 PUT_MODE (shift_test, mode);
139 PUT_MODE (XEXP (shift_test, 0), mode);
142 /* Fill in the integers. */
143 XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
144 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
146 return (rtx_cost (and_test, IF_THEN_ELSE)
147 <= rtx_cost (shift_test, IF_THEN_ELSE));
150 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
151 the result is zero, or IF_TRUE_LABEL if the result is one.
152 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
153 meaning fall through in that case.
155 do_jump always does any pending stack adjust except when it does not
156 actually perform a jump. An example where there is no jump
157 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. */
159 void
160 do_jump (tree exp, rtx if_false_label, rtx if_true_label)
162 enum tree_code code = TREE_CODE (exp);
163 rtx temp;
164 int i;
165 tree type;
166 enum machine_mode mode;
167 rtx drop_through_label = 0;
169 switch (code)
171 case ERROR_MARK:
172 break;
174 case INTEGER_CST:
175 temp = integer_zerop (exp) ? if_false_label : if_true_label;
176 if (temp)
177 emit_jump (temp);
178 break;
180 #if 0
181 /* This is not true with #pragma weak */
182 case ADDR_EXPR:
183 /* The address of something can never be zero. */
184 if (if_true_label)
185 emit_jump (if_true_label);
186 break;
187 #endif
189 case NOP_EXPR:
190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
191 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
192 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
193 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
194 goto normal;
195 case CONVERT_EXPR:
196 /* If we are narrowing the operand, we have to do the compare in the
197 narrower mode. */
198 if ((TYPE_PRECISION (TREE_TYPE (exp))
199 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
200 goto normal;
201 case NON_LVALUE_EXPR:
202 case ABS_EXPR:
203 case NEGATE_EXPR:
204 case LROTATE_EXPR:
205 case RROTATE_EXPR:
206 /* These cannot change zero->nonzero or vice versa. */
207 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
208 break;
210 case MINUS_EXPR:
211 /* Nonzero iff operands of minus differ. */
212 do_compare_and_jump (build2 (NE_EXPR, TREE_TYPE (exp),
213 TREE_OPERAND (exp, 0),
214 TREE_OPERAND (exp, 1)),
215 NE, NE, if_false_label, if_true_label);
216 break;
218 case BIT_AND_EXPR:
219 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
220 See if the former is preferred for jump tests and restore it
221 if so. */
222 if (integer_onep (TREE_OPERAND (exp, 1)))
224 tree exp0 = TREE_OPERAND (exp, 0);
225 rtx set_label, clr_label;
227 /* Strip narrowing integral type conversions. */
228 while ((TREE_CODE (exp0) == NOP_EXPR
229 || TREE_CODE (exp0) == CONVERT_EXPR
230 || TREE_CODE (exp0) == NON_LVALUE_EXPR)
231 && TREE_OPERAND (exp0, 0) != error_mark_node
232 && TYPE_PRECISION (TREE_TYPE (exp0))
233 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
234 exp0 = TREE_OPERAND (exp0, 0);
236 /* "exp0 ^ 1" inverts the sense of the single bit test. */
237 if (TREE_CODE (exp0) == BIT_XOR_EXPR
238 && integer_onep (TREE_OPERAND (exp0, 1)))
240 exp0 = TREE_OPERAND (exp0, 0);
241 clr_label = if_true_label;
242 set_label = if_false_label;
244 else
246 clr_label = if_false_label;
247 set_label = if_true_label;
250 if (TREE_CODE (exp0) == RSHIFT_EXPR)
252 tree arg = TREE_OPERAND (exp0, 0);
253 tree shift = TREE_OPERAND (exp0, 1);
254 tree argtype = TREE_TYPE (arg);
255 if (TREE_CODE (shift) == INTEGER_CST
256 && compare_tree_int (shift, 0) >= 0
257 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
258 && prefer_and_bit_test (TYPE_MODE (argtype),
259 TREE_INT_CST_LOW (shift)))
261 HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
262 << TREE_INT_CST_LOW (shift);
263 do_jump (build2 (BIT_AND_EXPR, argtype, arg,
264 build_int_cst_type (argtype, mask)),
265 clr_label, set_label);
266 break;
271 /* If we are AND'ing with a small constant, do this comparison in the
272 smallest type that fits. If the machine doesn't have comparisons
273 that small, it will be converted back to the wider comparison.
274 This helps if we are testing the sign bit of a narrower object.
275 combine can't do this for us because it can't know whether a
276 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
278 if (! SLOW_BYTE_ACCESS
279 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
280 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
281 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
282 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
283 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
284 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
285 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
286 != CODE_FOR_nothing))
288 do_jump (convert (type, exp), if_false_label, if_true_label);
289 break;
291 goto normal;
293 case TRUTH_NOT_EXPR:
294 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
295 break;
297 case COND_EXPR:
299 rtx label1 = gen_label_rtx ();
300 if (!if_true_label || !if_false_label)
302 drop_through_label = gen_label_rtx ();
303 if (!if_true_label)
304 if_true_label = drop_through_label;
305 if (!if_false_label)
306 if_false_label = drop_through_label;
309 do_pending_stack_adjust ();
310 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
311 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
312 emit_label (label1);
313 do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
314 break;
317 case TRUTH_ANDIF_EXPR:
318 case TRUTH_ORIF_EXPR:
319 case COMPOUND_EXPR:
320 /* Lowered by gimplify.c. */
321 gcc_unreachable ();
323 case COMPONENT_REF:
324 case BIT_FIELD_REF:
325 case ARRAY_REF:
326 case ARRAY_RANGE_REF:
328 HOST_WIDE_INT bitsize, bitpos;
329 int unsignedp;
330 enum machine_mode mode;
331 tree type;
332 tree offset;
333 int volatilep = 0;
335 /* Get description of this reference. We don't actually care
336 about the underlying object here. */
337 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
338 &unsignedp, &volatilep, false);
340 type = lang_hooks.types.type_for_size (bitsize, unsignedp);
341 if (! SLOW_BYTE_ACCESS
342 && type != 0 && bitsize >= 0
343 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
344 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
345 != CODE_FOR_nothing))
347 do_jump (convert (type, exp), if_false_label, if_true_label);
348 break;
350 goto normal;
353 case EQ_EXPR:
355 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
357 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
358 != MODE_COMPLEX_FLOAT);
359 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
360 != MODE_COMPLEX_INT);
362 if (integer_zerop (TREE_OPERAND (exp, 1)))
363 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
364 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
365 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
366 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
367 else
368 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
369 break;
372 case NE_EXPR:
374 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
376 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
377 != MODE_COMPLEX_FLOAT);
378 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
379 != MODE_COMPLEX_INT);
381 if (integer_zerop (TREE_OPERAND (exp, 1)))
382 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
383 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
384 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
385 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
386 else
387 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
388 break;
391 case LT_EXPR:
392 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
393 if (GET_MODE_CLASS (mode) == MODE_INT
394 && ! can_compare_p (LT, mode, ccp_jump))
395 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
396 else
397 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
398 break;
400 case LE_EXPR:
401 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
402 if (GET_MODE_CLASS (mode) == MODE_INT
403 && ! can_compare_p (LE, mode, ccp_jump))
404 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
405 else
406 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
407 break;
409 case GT_EXPR:
410 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
411 if (GET_MODE_CLASS (mode) == MODE_INT
412 && ! can_compare_p (GT, mode, ccp_jump))
413 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
414 else
415 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
416 break;
418 case GE_EXPR:
419 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
420 if (GET_MODE_CLASS (mode) == MODE_INT
421 && ! can_compare_p (GE, mode, ccp_jump))
422 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
423 else
424 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
425 break;
427 case UNORDERED_EXPR:
428 case ORDERED_EXPR:
430 enum rtx_code cmp, rcmp;
431 int do_rev;
433 if (code == UNORDERED_EXPR)
434 cmp = UNORDERED, rcmp = ORDERED;
435 else
436 cmp = ORDERED, rcmp = UNORDERED;
437 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
439 do_rev = 0;
440 if (! can_compare_p (cmp, mode, ccp_jump)
441 && (can_compare_p (rcmp, mode, ccp_jump)
442 /* If the target doesn't provide either UNORDERED or ORDERED
443 comparisons, canonicalize on UNORDERED for the library. */
444 || rcmp == UNORDERED))
445 do_rev = 1;
447 if (! do_rev)
448 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
449 else
450 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
452 break;
455 enum rtx_code rcode1;
456 enum tree_code tcode1, tcode2;
458 case UNLT_EXPR:
459 rcode1 = UNLT;
460 tcode1 = UNORDERED_EXPR;
461 tcode2 = LT_EXPR;
462 goto unordered_bcc;
463 case UNLE_EXPR:
464 rcode1 = UNLE;
465 tcode1 = UNORDERED_EXPR;
466 tcode2 = LE_EXPR;
467 goto unordered_bcc;
468 case UNGT_EXPR:
469 rcode1 = UNGT;
470 tcode1 = UNORDERED_EXPR;
471 tcode2 = GT_EXPR;
472 goto unordered_bcc;
473 case UNGE_EXPR:
474 rcode1 = UNGE;
475 tcode1 = UNORDERED_EXPR;
476 tcode2 = GE_EXPR;
477 goto unordered_bcc;
478 case UNEQ_EXPR:
479 rcode1 = UNEQ;
480 tcode1 = UNORDERED_EXPR;
481 tcode2 = EQ_EXPR;
482 goto unordered_bcc;
483 case LTGT_EXPR:
484 /* It is ok for LTGT_EXPR to trap when the result is unordered,
485 so expand to (a < b) || (a > b). */
486 rcode1 = LTGT;
487 tcode1 = LT_EXPR;
488 tcode2 = GT_EXPR;
489 goto unordered_bcc;
491 unordered_bcc:
492 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
493 if (can_compare_p (rcode1, mode, ccp_jump))
494 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
495 if_true_label);
496 else
498 tree op0 = save_expr (TREE_OPERAND (exp, 0));
499 tree op1 = save_expr (TREE_OPERAND (exp, 1));
500 tree cmp0, cmp1;
502 /* If the target doesn't support combined unordered
503 compares, decompose into two comparisons. */
504 if (if_true_label == 0)
505 drop_through_label = if_true_label = gen_label_rtx ();
507 cmp0 = fold (build2 (tcode1, TREE_TYPE (exp), op0, op1));
508 cmp1 = fold (build2 (tcode2, TREE_TYPE (exp), op0, op1));
509 do_jump (cmp0, 0, if_true_label);
510 do_jump (cmp1, if_false_label, if_true_label);
513 break;
515 /* Special case:
516 __builtin_expect (<test>, 0) and
517 __builtin_expect (<test>, 1)
519 We need to do this here, so that <test> is not converted to a SCC
520 operation on machines that use condition code registers and COMPARE
521 like the PowerPC, and then the jump is done based on whether the SCC
522 operation produced a 1 or 0. */
523 case CALL_EXPR:
524 /* Check for a built-in function. */
526 tree fndecl = get_callee_fndecl (exp);
527 tree arglist = TREE_OPERAND (exp, 1);
529 if (fndecl
530 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
531 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
532 && arglist != NULL_TREE
533 && TREE_CHAIN (arglist) != NULL_TREE)
535 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
536 if_true_label);
538 if (seq != NULL_RTX)
540 emit_insn (seq);
541 return;
545 /* Fall through and generate the normal code. */
547 default:
548 normal:
549 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
550 do_pending_stack_adjust ();
552 if (GET_CODE (temp) == CONST_INT
553 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
554 || GET_CODE (temp) == LABEL_REF)
556 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
557 if (target)
558 emit_jump (target);
560 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
561 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
562 /* Note swapping the labels gives us not-equal. */
563 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
564 else
566 gcc_assert (GET_MODE (temp) != VOIDmode);
568 /* The RTL optimizers prefer comparisons against pseudos. */
569 if (GET_CODE (temp) == SUBREG)
571 /* Compare promoted variables in their promoted mode. */
572 if (SUBREG_PROMOTED_VAR_P (temp)
573 && REG_P (XEXP (temp, 0)))
574 temp = XEXP (temp, 0);
575 else
576 temp = copy_to_reg (temp);
578 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
579 NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
580 GET_MODE (temp), NULL_RTX,
581 if_false_label, if_true_label);
585 if (drop_through_label)
587 do_pending_stack_adjust ();
588 emit_label (drop_through_label);
592 /* Given a comparison expression EXP for values too wide to be compared
593 with one insn, test the comparison and jump to the appropriate label.
594 The code of EXP is ignored; we always test GT if SWAP is 0,
595 and LT if SWAP is 1. */
597 static void
598 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
599 rtx if_true_label)
601 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
602 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
603 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
604 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
606 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
607 if_true_label);
610 /* Compare OP0 with OP1, word at a time, in mode MODE.
611 UNSIGNEDP says to do unsigned comparison.
612 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
614 void
615 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
616 rtx op1, rtx if_false_label, rtx if_true_label)
618 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
619 rtx drop_through_label = 0;
620 int i;
622 if (! if_true_label || ! if_false_label)
623 drop_through_label = gen_label_rtx ();
624 if (! if_true_label)
625 if_true_label = drop_through_label;
626 if (! if_false_label)
627 if_false_label = drop_through_label;
629 /* Compare a word at a time, high order first. */
630 for (i = 0; i < nwords; i++)
632 rtx op0_word, op1_word;
634 if (WORDS_BIG_ENDIAN)
636 op0_word = operand_subword_force (op0, i, mode);
637 op1_word = operand_subword_force (op1, i, mode);
639 else
641 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
642 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
645 /* All but high-order word must be compared as unsigned. */
646 do_compare_rtx_and_jump (op0_word, op1_word, GT,
647 (unsignedp || i > 0), word_mode, NULL_RTX,
648 NULL_RTX, if_true_label);
650 /* Consider lower words only if these are equal. */
651 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
652 NULL_RTX, NULL_RTX, if_false_label);
655 if (if_false_label)
656 emit_jump (if_false_label);
657 if (drop_through_label)
658 emit_label (drop_through_label);
661 /* Given an EQ_EXPR expression EXP for values too wide to be compared
662 with one insn, test the comparison and jump to the appropriate label. */
664 static void
665 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
667 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
668 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
669 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
670 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
671 int i;
672 rtx drop_through_label = 0;
674 if (! if_false_label)
675 drop_through_label = if_false_label = gen_label_rtx ();
677 for (i = 0; i < nwords; i++)
678 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
679 operand_subword_force (op1, i, mode),
680 EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
681 word_mode, NULL_RTX, if_false_label, NULL_RTX);
683 if (if_true_label)
684 emit_jump (if_true_label);
685 if (drop_through_label)
686 emit_label (drop_through_label);
689 /* Jump according to whether OP0 is 0.
690 We assume that OP0 has an integer mode that is too wide
691 for the available compare insns. */
693 void
694 do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
696 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
697 rtx part;
698 int i;
699 rtx drop_through_label = 0;
701 /* The fastest way of doing this comparison on almost any machine is to
702 "or" all the words and compare the result. If all have to be loaded
703 from memory and this is a very wide item, it's possible this may
704 be slower, but that's highly unlikely. */
706 part = gen_reg_rtx (word_mode);
707 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
708 for (i = 1; i < nwords && part != 0; i++)
709 part = expand_binop (word_mode, ior_optab, part,
710 operand_subword_force (op0, i, GET_MODE (op0)),
711 part, 1, OPTAB_WIDEN);
713 if (part != 0)
715 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
716 NULL_RTX, if_false_label, if_true_label);
718 return;
721 /* If we couldn't do the "or" simply, do this with a series of compares. */
722 if (! if_false_label)
723 drop_through_label = if_false_label = gen_label_rtx ();
725 for (i = 0; i < nwords; i++)
726 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
727 const0_rtx, EQ, 1, word_mode, NULL_RTX,
728 if_false_label, NULL_RTX);
730 if (if_true_label)
731 emit_jump (if_true_label);
733 if (drop_through_label)
734 emit_label (drop_through_label);
737 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
738 MODE is the machine mode of the comparison, not of the result.
739 (including code to compute the values to be compared) and set CC0
740 according to the result. The decision as to signed or unsigned
741 comparison must be made by the caller.
743 We force a stack adjustment unless there are currently
744 things pushed on the stack that aren't yet used.
746 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
747 compared. */
750 compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
751 enum machine_mode mode, rtx size)
753 rtx tem;
755 /* If one operand is constant, make it the second one. Only do this
756 if the other operand is not constant as well. */
758 if (swap_commutative_operands_p (op0, op1))
760 tem = op0;
761 op0 = op1;
762 op1 = tem;
763 code = swap_condition (code);
766 if (flag_force_mem)
768 op0 = force_not_mem (op0);
769 op1 = force_not_mem (op1);
772 do_pending_stack_adjust ();
774 code = unsignedp ? unsigned_condition (code) : code;
775 tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
776 if (tem)
778 if (CONSTANT_P (tem))
779 return tem;
781 if (COMPARISON_P (tem))
783 code = GET_CODE (tem);
784 op0 = XEXP (tem, 0);
785 op1 = XEXP (tem, 1);
786 mode = GET_MODE (op0);
787 unsignedp = (code == GTU || code == LTU
788 || code == GEU || code == LEU);
792 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
794 #if HAVE_cc0
795 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
796 #else
797 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
798 #endif
801 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
802 The decision as to signed or unsigned comparison must be made by the caller.
804 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
805 compared. */
807 void
808 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
809 enum machine_mode mode, rtx size, rtx if_false_label,
810 rtx if_true_label)
812 rtx tem;
813 int dummy_true_label = 0;
815 /* Reverse the comparison if that is safe and we want to jump if it is
816 false. */
817 if (! if_true_label && ! FLOAT_MODE_P (mode))
819 if_true_label = if_false_label;
820 if_false_label = 0;
821 code = reverse_condition (code);
824 /* If one operand is constant, make it the second one. Only do this
825 if the other operand is not constant as well. */
827 if (swap_commutative_operands_p (op0, op1))
829 tem = op0;
830 op0 = op1;
831 op1 = tem;
832 code = swap_condition (code);
835 if (flag_force_mem)
837 op0 = force_not_mem (op0);
838 op1 = force_not_mem (op1);
841 do_pending_stack_adjust ();
843 code = unsignedp ? unsigned_condition (code) : code;
844 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
845 op0, op1)))
847 if (CONSTANT_P (tem))
849 rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
850 ? if_false_label : if_true_label;
851 if (label)
852 emit_jump (label);
853 return;
856 code = GET_CODE (tem);
857 mode = GET_MODE (tem);
858 op0 = XEXP (tem, 0);
859 op1 = XEXP (tem, 1);
860 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
863 if (! if_true_label)
865 dummy_true_label = 1;
866 if_true_label = gen_label_rtx ();
869 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
870 if_true_label);
872 if (if_false_label)
873 emit_jump (if_false_label);
874 if (dummy_true_label)
875 emit_label (if_true_label);
878 /* Generate code for a comparison expression EXP (including code to compute
879 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
880 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
881 generated code will drop through.
882 SIGNED_CODE should be the rtx operation for this comparison for
883 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
885 We force a stack adjustment unless there are currently
886 things pushed on the stack that aren't yet used. */
888 static void
889 do_compare_and_jump (tree exp, enum rtx_code signed_code,
890 enum rtx_code unsigned_code, rtx if_false_label,
891 rtx if_true_label)
893 rtx op0, op1;
894 tree type;
895 enum machine_mode mode;
896 int unsignedp;
897 enum rtx_code code;
899 /* Don't crash if the comparison was erroneous. */
900 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
901 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
902 return;
904 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
905 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
906 return;
908 type = TREE_TYPE (TREE_OPERAND (exp, 0));
909 mode = TYPE_MODE (type);
910 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
911 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
912 || (GET_MODE_BITSIZE (mode)
913 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
914 1)))))))
916 /* op0 might have been replaced by promoted constant, in which
917 case the type of second argument should be used. */
918 type = TREE_TYPE (TREE_OPERAND (exp, 1));
919 mode = TYPE_MODE (type);
921 unsignedp = TYPE_UNSIGNED (type);
922 code = unsignedp ? unsigned_code : signed_code;
924 #ifdef HAVE_canonicalize_funcptr_for_compare
925 /* If function pointers need to be "canonicalized" before they can
926 be reliably compared, then canonicalize them.
927 Only do this if *both* sides of the comparison are function pointers.
928 If one side isn't, we want a noncanonicalized comparison. See PR
929 middle-end/17564. */
930 if (HAVE_canonicalize_funcptr_for_compare
931 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
932 && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
933 == FUNCTION_TYPE
934 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
935 && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
936 == FUNCTION_TYPE)
938 rtx new_op0 = gen_reg_rtx (mode);
939 rtx new_op1 = gen_reg_rtx (mode);
941 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
942 op0 = new_op0;
944 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
945 op1 = new_op1;
947 #endif
949 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
950 ((mode == BLKmode)
951 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
952 if_false_label, if_true_label);
955 #include "gt-dojump.h"