/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
#include "basic-block.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */

void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from the function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}
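
/* Illustrative sketch (editor's addition, not in the original file): a
   caller expanding "if (COND) THEN; else ELSE;" could use these entry
   points roughly as follows, where COND is a hypothetical tree for the
   controlling expression:

	rtx else_label = gen_label_rtx ();
	rtx end_label = gen_label_rtx ();
	jumpifnot (cond, else_label);	... expand the THEN arm ...
	emit_jump (end_label);
	emit_label (else_label);	... expand the ELSE arm ...
	emit_label (end_label);

   jumpif/jumpifnot simply forward to do_jump with one label left null,
   so the "other" outcome falls through.  */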

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  return (rtx_cost (and_test, IF_THEN_ELSE, optimize_insn_for_speed_p ())
          <= rtx_cost (shift_test, IF_THEN_ELSE, optimize_insn_for_speed_p ()));
}
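
/* Worked example (editor's addition, not in the original file): for
   MODE == SImode and BITNUM == 3, the two patterns whose rtx_cost values
   are compared above are

	and_test:   (and:SI (reg:SI) (const_int 8))
	shift_test: (and:SI (ashiftrt:SI (reg:SI) (const_int 3)) (const_int 1))

   and the function returns true when the target reports the AND form as
   no more expensive than the shift form.  */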

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
                != CODE_FOR_nothing))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

      {
        enum rtx_code rcode1;
        enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();

            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
        break;
      }

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
              != CODE_FOR_nothing))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
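
/* Worked example (editor's addition, not in the original file): for
   jumpif (a && b, L), i.e. a TRUTH_ANDIF_EXPR with a null false label,
   do_jump creates a local drop-through label D and emits, in effect:

	if (a == 0) goto D;
	if (b != 0) goto L;
     D:

   so a false result simply falls through past the jump to L, and D is
   emitted by the drop_through_label handling at the end of do_jump.  */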

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
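
/* Worked example (editor's addition, not in the original file): comparing
   two DImode values on a 32-bit target (UNITS_PER_WORD == 4) gives
   nwords == 2, so the loop above emits, high word first:

	if (op0.high >  op1.high) goto if_true_label;
	if (op0.high != op1.high) goto if_false_label;
	if (op0.low  >  op1.low)  goto if_true_label;	(unsigned compare)
	if (op0.low  != op1.low)  goto if_false_label;

   Only the high-order ">" honours UNSIGNEDP; the lower words are always
   compared as unsigned.  */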

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  rtx op0 = expand_normal (TREE_OPERAND (exp, swap));
  rtx op1 = expand_normal (TREE_OPERAND (exp, !swap));
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}

/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
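
/* Worked example (editor's addition, not in the original file): testing a
   DImode OP0 against zero on a 32-bit target ORs its two word-sized halves
   into one word_mode pseudo and emits a single compare, roughly:

	part = op0.low | op0.high;
	if (part == 0) goto if_true_label; else goto if_false_label;

   The word-by-word fallback in the function is used only when expand_binop
   fails to produce the OR.  */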

/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_normal (TREE_OPERAND (exp, 0));
  rtx op1 = expand_normal (TREE_OPERAND (exp, 1));
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label);
}

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set CC0
   according to the result.  MODE is the machine mode of the comparison,
   not of the result.  The decision as to signed or unsigned comparison
   must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      if (CONSTANT_P (tem))
        return tem;

      if (COMPARISON_P (tem))
        {
          code = GET_CODE (tem);
          op0 = XEXP (tem, 0);
          op1 = XEXP (tem, 1);
          mode = GET_MODE (op0);
          unsignedp = (code == GTU || code == LTU
                       || code == GEU || code == LEU);
        }
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
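
/* Usage note (editor's addition, not in the original file): the returned rtx
   is a comparison suitable for use as a branch condition.  On a cc0 target
   it has the shape (CODE (cc0) (const_int 0)), relying on the emit_cmp_insn
   just issued; otherwise it is simply (CODE op0 op1), e.g.
   (gtu (reg:SI 60) (reg:SI 61)) for an unsigned SImode ">" test.  */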

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                             if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
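
/* Design note (editor's addition, not in the original file): when the caller
   passes a null IF_TRUE_LABEL, the test is inverted with reverse_condition
   so that only the "false" outcome needs a jump; this is skipped for
   FLOAT_MODE_P because reversing, say, GE to LT is not valid in the
   presence of NaNs.  The dummy true label emitted at the end gives
   emit_cmp_and_jump_insns somewhere to branch when the caller only wanted
   the false-label jump.  */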

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (TREE_OPERAND (exp, 0));
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_normal (TREE_OPERAND (exp, 1));
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
         case the type of the second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
           == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

#include "gt-dojump.h"