/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "tm_p.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "flags.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "langhooks.h"

static bool prefer_and_bit_test (machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int,
                                      rtx_code_label *, rtx_code_label *, int);
static void do_jump_by_parts_equality (tree, tree, rtx_code_label *,
                                       rtx_code_label *, int);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code,
                                 rtx_code_label *, rtx_code_label *, int);

/* Invert probability if there is any.  -1 stands for unknown.  */

static inline int
inv (int prob)
{
  return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
}
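
/* Worked example: probabilities in this file are scaled by REG_BR_PROB_BASE
   (10000 in this tree), so a jump expected to be taken 90% of the time
   carries prob == 9000 and inv (9000) == 1000, the complementary 10%;
   inv (-1) stays -1, i.e. still unknown.  */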

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Remember pending_stack_adjust/stack_pointer_delta.
   To be used around code that may call do_pending_stack_adjust (),
   but the generated code could be discarded e.g. using delete_insns_since.  */

void
save_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  save->x_pending_stack_adjust = pending_stack_adjust;
  save->x_stack_pointer_delta = stack_pointer_delta;
}

/* Restore the saved pending_stack_adjust/stack_pointer_delta.  */

void
restore_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  if (inhibit_defer_pop == 0)
    {
      pending_stack_adjust = save->x_pending_stack_adjust;
      stack_pointer_delta = save->x_stack_pointer_delta;
    }
}
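
/* Sketch of the intended save/restore usage around tentative RTL emission
   (hypothetical caller; the variable names are only illustrative):

     saved_pending_stack_adjust save;
     rtx_insn *last = get_last_insn ();
     save_pending_stack_adjust (&save);
     ... emit code that may call do_pending_stack_adjust () ...
     and, if that emitted sequence is then thrown away:
       delete_insns_since (last);
       restore_pending_stack_adjust (&save);  */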

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.  */

void
jumpifnot (tree exp, rtx_code_label *label, int prob)
{
  do_jump (exp, label, NULL, inv (prob));
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
             int prob)
{
  do_jump_1 (code, op0, op1, label, NULL, inv (prob));
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx_code_label *label, int prob)
{
  do_jump (exp, NULL, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1,
          rtx_code_label *label, int prob)
{
  do_jump_1 (code, op0, op1, NULL, label, prob);
}
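
/* Sketch of typical use when expanding a two-armed conditional
   (hypothetical labels; -1 means the branch probability is unknown):

     rtx_code_label *else_label = gen_label_rtx ();
     jumpifnot (cond, else_label, -1);    jump past the then-arm if COND == 0
     ... expand the then-arm, falling through ...
     emit_label (else_label);  */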

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (machine_mode mode, int bitnum)
{
  bool speed_p;
  wide_int mask = wi::set_bit_in_zero (bitnum, GET_MODE_PRECISION (mode));

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, LAST_VIRTUAL_REGISTER + 1);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = immed_wide_int_const (mask, mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, mode, IF_THEN_ELSE, 0, speed_p)
          <= rtx_cost (shift_test, mode, IF_THEN_ELSE, 0, speed_p));
}
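
/* Illustrative example: for MODE == SImode and BITNUM == 3, the two rtxes
   costed above correspond to "x & 8" (AND with the constant 1 << 3) versus
   "(x >> 3) & 1".  A true return means the plain AND form is no more
   expensive, so do_jump below will restore it for the jump test.  */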

/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1 .  IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
           rtx_code_label *if_false_label, rtx_code_label *if_true_label,
           int prob)
{
  machine_mode mode;
  rtx_code_label *drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
                                     prob);
        else
          do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
                               prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
                                     inv (prob));
        else
          do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
                               prob);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
                             prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
                             prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
                             prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
                             prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
                           prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
                           prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
                           prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
                           prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
                           prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
                           prob);
      break;

    case TRUTH_ANDIF_EXPR:
      {
        /* Spread the probability that the expression is false evenly between
           the two conditions. So the first condition is false half the total
           probability of being false. The second condition is false the other
           half of the total probability of being false, so its jump has a false
           probability of half the total, relative to the probability we
           reached it (i.e. the first condition was true).  */
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            int false_prob = inv (prob);
            int op0_false_prob = false_prob / 2;
            int op1_false_prob = GCOV_COMPUTE_SCALE ((false_prob / 2),
                                                     inv (op0_false_prob));
            /* Get the probability that each jump below is true.  */
            op0_prob = inv (op0_false_prob);
            op1_prob = inv (op1_false_prob);
          }
        if (if_false_label == NULL)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, drop_through_label, NULL, op0_prob);
            do_jump (op1, NULL, if_true_label, op1_prob);
          }
        else
          {
            do_jump (op0, if_false_label, NULL, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }
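
    /* Worked example for the TRUTH_ANDIF_EXPR case above, assuming
       REG_BR_PROB_BASE == 10000 and an overall true probability
       prob == 8000 (80%):

         false_prob     = inv (8000)                        = 2000
         op0_false_prob = 2000 / 2                          = 1000
         op1_false_prob = scaled 1000 * 10000 / inv (1000)  ~ 1111
         op0_prob       = inv (1000)                        = 9000
         op1_prob       = inv (1111)                        = 8889

       so each of the two jumps carries roughly half of the total 20% false
       probability, the second one relative to having been reached.  */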

    case TRUTH_ORIF_EXPR:
      {
        /* Spread the probability evenly between the two conditions. So
           the first condition has half the total probability of being true.
           The second condition has the other half of the total probability,
           so its jump has a probability of half the total, relative to
           the probability we reached it (i.e. the first condition was false).  */
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            op0_prob = prob / 2;
            op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
          }
        if (if_true_label == NULL)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, NULL, drop_through_label, op0_prob);
            do_jump (op1, if_false_label, NULL, op1_prob);
          }
        else
          {
            do_jump (op0, NULL, if_true_label, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump (tree exp, rtx_code_label *if_false_label,
         rtx_code_label *if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  machine_mode mode;
  rtx_code_label *drop_through_label = NULL;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      {
        rtx_code_label *lab = integer_zerop (exp) ? if_false_label
                                                  : if_true_label;
        if (lab)
          emit_jump (lab);
        break;
      }

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               inv (prob));
      break;

    case COND_EXPR:
      {
        rtx_code_label *label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL, -1);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx_code_label *set_label, *clr_label;
          int setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = inv (prob);
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
                       false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
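
/* Illustrative (target-dependent) example of the BIT_AND_EXPR narrowing in
   do_jump above: for "if (x & 0x80)" with X a 32-bit int, tree_floor_log2
   of 0x80 is 7, so an 8-bit integer mode suffices; if the target has a
   QImode compare, the test is re-expressed as a QImode comparison against
   zero, effectively a sign-bit test of the low byte instead of a full
   SImode AND-and-compare.  */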

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx_code_label *if_false_label,
                              rtx_code_label *if_true_label,
                              int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }

  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
    }

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
                               word_mode, NULL_RTX, NULL, if_true_label,
                               prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
        break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL, if_false_label, inv (prob));
    }

  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
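
/* Sketch of the sequence emitted above for a signed DImode "a > b" on a
   32-bit target (two words, high-order part first):

     if (a.high >  b.high) goto if_true_label;
     if (a.high != b.high) goto if_false_label;
     if (a.low  >u b.low)  goto if_true_label;
     goto if_false_label;   (or fall through when that is the drop-through)

   Only the high-order words are compared signed; the remaining words use
   an unsigned comparison, via the (unsignedp || i > 0) argument.  */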

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
                          rtx_code_label *if_false_label,
                          rtx_code_label *if_true_label, int prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label, prob);
}

/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (machine_mode mode, rtx op0,
                           rtx_code_label *if_false_label,
                           rtx_code_label *if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx_code_label *drop_through_label = NULL;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    if_false_label = drop_through_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
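
/* Illustrative example: testing a DImode value for zero on a 32-bit target
   ORs its two words into one word_mode register and tests that once,
   roughly

     tmp = a.low | a.high;
     if (tmp == 0) goto if_true_label;  else goto if_false_label;

   (either label may be NULL, meaning fall through), instead of emitting a
   separate compare-and-branch per word.  */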

/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (machine_mode mode, rtx op0, rtx op1,
                               rtx_code_label *if_false_label,
                               rtx_code_label *if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = NULL;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
                                 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
                                 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1,
                           rtx_code_label *if_false_label,
                           rtx_code_label *if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label, prob);
}

/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */

bool
split_comparison (enum rtx_code code, machine_mode mode,
                  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
    case LT:
      *code1 = ORDERED;
      *code2 = UNLT;
      return true;
    case LE:
      *code1 = ORDERED;
      *code2 = UNLE;
      return true;
    case GT:
      *code1 = ORDERED;
      *code2 = UNGT;
      return true;
    case GE:
      *code1 = ORDERED;
      *code2 = UNGE;
      return true;
    case EQ:
      *code1 = ORDERED;
      *code2 = UNEQ;
      return true;
    case NE:
      *code1 = UNORDERED;
      *code2 = LTGT;
      return false;
    case UNLT:
      *code1 = UNORDERED;
      *code2 = LT;
      return false;
    case UNLE:
      *code1 = UNORDERED;
      *code2 = LE;
      return false;
    case UNGT:
      *code1 = UNORDERED;
      *code2 = GT;
      return false;
    case UNGE:
      *code1 = UNORDERED;
      *code2 = GE;
      return false;
    case UNEQ:
      *code1 = UNORDERED;
      *code2 = EQ;
      return false;
    case LTGT:
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
        {
          *code1 = LT;
          *code2 = GT;
          return false;
        }
      else
        {
          *code1 = ORDERED;
          *code2 = NE;
          return true;
        }
    default:
      gcc_unreachable ();
    }
}
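
/* Example: on a mode that honors NaNs, LT splits into *CODE1 = ORDERED and
   *CODE2 = UNLT with a true return value, i.e. the two halves must be
   ANDed:  (a < b)  <==>  ORDERED (a, b) && UNLT (a, b).  Conversely UNGE
   splits into UNORDERED and GE with a false return, so its halves are
   ORed.  */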

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         machine_mode mode, rtx size,
                         rtx_code_label *if_false_label,
                         rtx_code_label *if_true_label, int prob)
{
  rtx tem;
  rtx_code_label *dummy_label = NULL;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          std::swap (if_true_label, if_false_label);
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx_code_label *label = (tem == const0_rtx
                                   || tem == CONST0_RTX (mode))
                                  ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          code = swap_condition (code);
          std::swap (op0, op1);
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));

          else
            {
              int first_prob = prob;
              if (first_code == UNORDERED)
                first_prob = REG_BR_PROB_BASE / 100;
              else if (first_code == ORDERED)
                first_prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100;
              if (and_them)
                {
                  rtx_code_label *dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL,
                                           first_prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                         size, NULL, if_true_label,
                                         first_prob);
            }
        }

      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}
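
/* Sketch of the floating-point split path above (assumptions: SFmode has a
   COMPARE insn but no direct conditional jump on LT, and swapping the
   operands does not help): do_compare_rtx_and_jump (a, b, LT, 0, SFmode, ...)
   first emits a branch to the false (or bypass) label for the UNORDERED
   outcome of the ORDERED half, treated as taken only about 1% of the time,
   and then falls through to emit_cmp_and_jump_insns for the remaining
   UNLT test.  */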

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
                     enum rtx_code unsigned_code,
                     rtx_code_label *if_false_label,
                     rtx_code_label *if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && POINTER_TYPE_P (TREE_TYPE (treeop0))
      && POINTER_TYPE_P (TREE_TYPE (treeop1))
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop1))))
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
                           if_false_label, if_true_label, prob);
}

#include "gt-dojump.h"