/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "real.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "hashtab.h"
#include "statistics.h"
#include "fixed-value.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
#include "predict.h"
#include "basic-block.h"
#include "tm_p.h"

static bool prefer_and_bit_test (machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int,
                                      rtx_code_label *, rtx_code_label *, int);
static void do_jump_by_parts_equality (tree, tree, rtx_code_label *,
                                       rtx_code_label *, int);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code,
                                 rtx_code_label *, rtx_code_label *, int);

/* Invert probability if there is any.  -1 stands for unknown.  */
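/* Probabilities here are integers scaled by REG_BR_PROB_BASE, so the
   inverse of a known probability is simply REG_BR_PROB_BASE - PROB.  */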
static inline int
inv (int prob)
{
  return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
}

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */

void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Remember pending_stack_adjust/stack_pointer_delta.
   To be used around code that may call do_pending_stack_adjust (),
   but the generated code could be discarded e.g. using delete_insns_since.  */

void
save_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  save->x_pending_stack_adjust = pending_stack_adjust;
  save->x_stack_pointer_delta = stack_pointer_delta;
}

/* Restore the saved pending_stack_adjust/stack_pointer_delta.  */

void
restore_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  if (inhibit_defer_pop == 0)
    {
      pending_stack_adjust = save->x_pending_stack_adjust;
      stack_pointer_delta = save->x_stack_pointer_delta;
    }
}

/* Expand conditional expressions.  */
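/* The jumpif/jumpifnot wrappers below expand EXP (or the exploded
   comparison OP0 CODE OP1) and branch to LABEL on one outcome while
   falling through on the other: jumpif branches when the value is
   nonzero, jumpifnot when it is zero, by passing LABEL in the
   corresponding label slot of do_jump and inverting PROB as needed.  */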
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.  */

void
jumpifnot (tree exp, rtx_code_label *label, int prob)
{
  do_jump (exp, label, NULL, inv (prob));
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
             int prob)
{
  do_jump_1 (code, op0, op1, label, NULL, inv (prob));
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx_code_label *label, int prob)
{
  do_jump (exp, NULL, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1,
          rtx_code_label *label, int prob)
{
  do_jump_1 (code, op0, op1, NULL, label, prob);
}

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;
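
/* These template rtxes are created once and registered as GC roots via
   GTY(()); each call to prefer_and_bit_test below only updates their
   mode and constant operands instead of allocating fresh RTL.  */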

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (machine_mode mode, int bitnum)
{
  bool speed_p;
  wide_int mask = wi::set_bit_in_zero (bitnum, GET_MODE_PRECISION (mode));

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = immed_wide_int_const (mask, mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, IF_THEN_ELSE, 0, speed_p)
          <= rtx_cost (shift_test, IF_THEN_ELSE, 0, speed_p));
}

/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1.  IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
           rtx_code_label *if_false_label, rtx_code_label *if_true_label,
           int prob)
{
  machine_mode mode;
  rtx_code_label *drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
                                     prob);
        else
          do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
                               prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
                                     inv (prob));
        else
          do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
                               prob);
        break;
      }

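      /* For the ordering comparisons below, both the signed and the
         unsigned RTL comparison code are passed; do_compare_and_jump
         picks one based on the signedness of the operands' type.  */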
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
                             prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
                             prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
                             prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
                             prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
                           prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
                           prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
                           prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
                           prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
                           prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
                           prob);
      break;

    case TRUTH_ANDIF_EXPR:
      {
        /* Spread the probability that the expression is false evenly between
           the two conditions.  So the first condition is false half the total
           probability of being false.  The second condition is false the other
           half of the total probability of being false, so its jump has a false
           probability of half the total, relative to the probability we
           reached it (i.e. the first condition was true).  */
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            int false_prob = inv (prob);
            int op0_false_prob = false_prob / 2;
            int op1_false_prob = GCOV_COMPUTE_SCALE ((false_prob / 2),
                                                     inv (op0_false_prob));
            /* Get the probability that each jump below is true.  */
            op0_prob = inv (op0_false_prob);
            op1_prob = inv (op1_false_prob);
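            /* For example, assuming REG_BR_PROB_BASE is 10000 and PROB is
               9000: false_prob is 1000, op0_false_prob is 500 (op0_prob
               9500), and op1_false_prob is rescaled to about 526 relative
               to reaching the second jump (op1_prob about 9474).  */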
          }
        if (if_false_label == NULL)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, drop_through_label, NULL, op0_prob);
            do_jump (op1, NULL, if_true_label, op1_prob);
          }
        else
          {
            do_jump (op0, if_false_label, NULL, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    case TRUTH_ORIF_EXPR:
      {
        /* Spread the probability evenly between the two conditions.  So
           the first condition has half the total probability of being true.
           The second condition has the other half of the total probability,
           so its jump has a probability of half the total, relative to
           the probability we reached it (i.e. the first condition was false).  */
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            op0_prob = prob / 2;
            op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
          }
        if (if_true_label == NULL)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, NULL, drop_through_label, op0_prob);
            do_jump (op1, if_false_label, NULL, op1_prob);
          }
        else
          {
            do_jump (op0, NULL, if_true_label, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump (tree exp, rtx_code_label *if_false_label,
         rtx_code_label *if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  machine_mode mode;
  rtx_code_label *drop_through_label = NULL;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      {
        rtx_code_label *lab = integer_zerop (exp) ? if_false_label
                                                  : if_true_label;
        if (lab)
          emit_jump (lab);
        break;
      }

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               inv (prob));
      break;

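      /* A COND_EXPR used as a jump condition: branch on the condition
         operand first, then expand each arm as a jump to the same pair
         of labels.  The probability of the condition itself is unknown
         here, hence the -1 passed below.  */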
    case COND_EXPR:
      {
        rtx_code_label *label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL, -1);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx_code_label *set_label, *clr_label;
          int setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = inv (prob);
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
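      /* Concretely, tree_floor_log2 gives the highest set bit of the
         constant, mode_for_size picks the narrowest integer mode that
         holds it, and the test is redone in that type when the target
         has a comparison for it.  */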

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
                       false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx_code_label *if_false_label,
                              rtx_code_label *if_true_label,
                              int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }

  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
    }

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
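      /* (The lower-order words carry magnitude only; the sign lives
         entirely in the high-order word, which is compared first.)  */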
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
                               word_mode, NULL_RTX, NULL, if_true_label,
                               prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
        break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL, if_false_label, inv (prob));
    }

  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
                          rtx_code_label *if_false_label,
                          rtx_code_label *if_true_label, int prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label, prob);
}

/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (machine_mode mode, rtx op0,
                           rtx_code_label *if_false_label,
                           rtx_code_label *if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx_code_label *drop_through_label = NULL;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    if_false_label = drop_through_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (machine_mode mode, rtx op0, rtx op1,
                               rtx_code_label *if_false_label,
                               rtx_code_label *if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = NULL;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
                                 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
                                 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1,
                           rtx_code_label *if_false_label,
                           rtx_code_label *if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label, prob);
}

/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */
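/* For example, LT splits into ORDERED and UNLT (to be ANDed), while
   UNLE splits into UNORDERED and LE (to be ORed).  */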

bool
split_comparison (enum rtx_code code, machine_mode mode,
                  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
    case LT:
      *code1 = ORDERED;
      *code2 = UNLT;
      return true;
    case LE:
      *code1 = ORDERED;
      *code2 = UNLE;
      return true;
    case GT:
      *code1 = ORDERED;
      *code2 = UNGT;
      return true;
    case GE:
      *code1 = ORDERED;
      *code2 = UNGE;
      return true;
    case EQ:
      *code1 = ORDERED;
      *code2 = UNEQ;
      return true;
    case NE:
      *code1 = UNORDERED;
      *code2 = LTGT;
      return false;
    case UNLT:
      *code1 = UNORDERED;
      *code2 = LT;
      return false;
    case UNLE:
      *code1 = UNORDERED;
      *code2 = LE;
      return false;
    case UNGT:
      *code1 = UNORDERED;
      *code2 = GT;
      return false;
    case UNGE:
      *code1 = UNORDERED;
      *code2 = GE;
      return false;
    case UNEQ:
      *code1 = UNORDERED;
      *code2 = EQ;
      return false;
    case LTGT:
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
        {
          *code1 = LT;
          *code2 = GT;
          return false;
        }
      else
        {
          *code1 = ORDERED;
          *code2 = NE;
          return true;
        }
    default:
      gcc_unreachable ();
    }
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         machine_mode mode, rtx size,
                         rtx_code_label *if_false_label,
                         rtx_code_label *if_true_label, int prob)
{
  rtx tem;
  rtx_code_label *dummy_label = NULL;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          std::swap (if_true_label, if_false_label);
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx_code_label *label = (tem == const0_rtx
                                   || tem == CONST0_RTX (mode))
                                        ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

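  /* If the target cannot compare-and-branch directly in this integer
     mode, fall back to the word-by-word helpers above.  LE/GE and their
     unsigned variants are handled as the inverse of GT/LT by swapping
     the label arguments and inverting the branch probability.  */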
  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          code = swap_condition (code);
          std::swap (op0, op1);
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
          else
            {
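              /* NaNs are possible, so the split emits two jumps.  Assume
                 the unordered (NaN) outcome is rare: give an UNORDERED
                 first test about a 1% chance of being taken, and an
                 ORDERED first test about 99%.  */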
              int first_prob = prob;
              if (first_code == UNORDERED)
                first_prob = REG_BR_PROB_BASE / 100;
              else if (first_code == ORDERED)
                first_prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100;
              if (and_them)
                {
                  rtx_code_label *dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL,
                                           first_prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                         size, NULL, if_true_label,
                                         first_prob);
            }
        }

      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
                     enum rtx_code unsigned_code,
                     rtx_code_label *if_false_label,
                     rtx_code_label *if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0))) == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1))) == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
                           if_false_label, if_true_label, prob);
}

#include "gt-dojump.h"