/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
#include "basic-block.h"
#include "tm_p.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx, int);
/* Invert probability if there is any.  -1 stands for unknown.  */
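/* For illustration: branch probabilities are fixed-point values scaled by
   REG_BR_PROB_BASE (normally 10000), so inverting a probability of 3000
   yields 7000, the probability of the opposite outcome, while the
   "unknown" value -1 is passed through unchanged.  */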
static inline int
inv (int prob)
{
  return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}
/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}
/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label, int prob)
{
  do_jump (exp, label, NULL_RTX, inv (prob));
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, label, NULL_RTX, inv (prob));
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label, int prob)
{
  do_jump (exp, NULL_RTX, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, NULL_RTX, label, prob);
}
/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */
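/* For instance, with BITNUM == 3 this weighs "X & 8" against "(X >> 3) & 1";
   which form wins depends on the target's rtx costs, e.g. on whether the
   mask constant still fits in an immediate operand.  */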
static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  bool speed_p;

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1)
    = immed_double_int_const (double_int_zero.set_bit (bitnum), mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, IF_THEN_ELSE, 0, speed_p)
          <= rtx_cost (shift_test, IF_THEN_ELSE, 0, speed_p));
}
/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1.  IF_FALSE_LABEL and IF_TRUE_LABEL are used as in do_jump.
   PROB is the probability of the jump to if_true_label, or -1 if unknown.  */
void
do_jump_1 (enum tree_code code, tree op0, tree op1,
           rtx if_false_label, rtx if_true_label, int prob)
{
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
                                     prob);
        else
          do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
                               prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
                                     inv (prob));
        else
          do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
                               prob);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
                             prob);
      break;
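      /* For LE and GE below, the by-parts fallback can only test "greater",
         so the two labels and the branch probability are swapped: "a <= b"
         is emitted as the failure of "a > b", and "a >= b" as the failure
         of "a < b".  */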
    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
                             prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
                             prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
                             prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
                           prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
                           prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
                           prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
                           prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
                           prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
                           prob);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (op0, drop_through_label, NULL_RTX, prob);
          do_jump (op1, NULL_RTX, if_true_label, prob);
        }
      else
        {
          do_jump (op0, if_false_label, NULL_RTX, prob);
          do_jump (op1, if_false_label, if_true_label, prob);
        }
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (op0, NULL_RTX, drop_through_label, prob);
          do_jump (op1, if_false_label, NULL_RTX, prob);
        }
      else
        {
          do_jump (op0, NULL_RTX, if_true_label, prob);
          do_jump (op1, if_false_label, if_true_label, prob);
        }
      break;

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               inv (prob));
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
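      /* E.g. a source test "if (x & 8)" may reach this point as
         "(x >> 3) & 1"; when prefer_and_bit_test says the masked form is
         cheaper, the code below rebuilds "x & 8" and jumps on that.  */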
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;
          int setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = inv (prob);
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
                       false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
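/* Roughly, for a double-word comparison this emits, in order:
   "if (hi0 > hi1) goto true", "if (hi0 != hi1) goto false",
   "if (lo0 >u lo1) goto true", "goto false", where all words below the
   high-order one are compared unsigned and drop-through labels may
   replace some of the jumps.  */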
static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label,
                              int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }

  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
    }

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
                               word_mode, NULL_RTX, NULL_RTX, if_true_label,
                               prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
        break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label, inv (prob));
    }

  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
                          rtx if_false_label, rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label, prob);
}
/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */
static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
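  /* For instance, a double-word OP0 becomes "tmp = hi | lo;" followed by a
     single jump to IF_TRUE_LABEL if tmp compares equal to zero.  */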
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
                                 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
                                 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1, rtx if_false_label,
                           rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label, prob);
}
/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */
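/* For example, LT splits into "ORDERED && UNLT", while its inverse UNGE
   splits into "UNORDERED || GE"; an ordered comparison holds only when
   neither operand is a NaN.  */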
bool
split_comparison (enum rtx_code code, enum machine_mode mode,
                  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
    case LT:
      *code1 = ORDERED;
      *code2 = UNLT;
      return true;
    case LE:
      *code1 = ORDERED;
      *code2 = UNLE;
      return true;
    case GT:
      *code1 = ORDERED;
      *code2 = UNGT;
      return true;
    case GE:
      *code1 = ORDERED;
      *code2 = UNGE;
      return true;
    case EQ:
      *code1 = ORDERED;
      *code2 = UNEQ;
      return true;
    case NE:
      *code1 = UNORDERED;
      *code2 = LTGT;
      return false;
    case UNLT:
      *code1 = UNORDERED;
      *code2 = LT;
      return false;
    case UNLE:
      *code1 = UNORDERED;
      *code2 = LE;
      return false;
    case UNGT:
      *code1 = UNORDERED;
      *code2 = GT;
      return false;
    case UNGE:
      *code1 = UNORDERED;
      *code2 = GE;
      return false;
    case UNEQ:
      *code1 = UNORDERED;
      *code2 = EQ;
      return false;
    case LTGT:
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
        {
          *code1 = LT;
          *code2 = GT;
          return false;
        }
      else
        {
          *code1 = ORDERED;
          *code2 = NE;
          return true;
        }
    default:
      gcc_unreachable ();
    }
}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL_RTX;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();
  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          rtx tmp;
          code = swap_condition (code);
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));

          else
            {
              if (and_them)
                {
                  rtx dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                           size, dest_label, NULL_RTX, prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                         size, NULL_RTX, if_true_label, prob);
            }
        }

      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0)))
           == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1)))
           == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
                           if_false_label, if_true_label, prob);
}
#include "gt-dojump.h"