/* Source: official-gcc.git, gcc/dojump.c
   blob ee12d761eee0833148f07b95d31656fee3ef6e92  */
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "insn-config.h"
29 #include "insn-attr.h"
30 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
31 #include "expr.h"
32 #include "optabs.h"
33 #include "langhooks.h"
34 #include "ggc.h"
35 #include "basic-block.h"
36 #include "tm_p.h"
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
40 static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
41 static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
42 rtx, int);
44 /* Invert probability if there is any. -1 stands for unknown. */
46 static inline int
47 inv (int prob)
49 return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
52 /* At the start of a function, record that we have no previously-pushed
53 arguments waiting to be popped. */
55 void
56 init_pending_stack_adjust (void)
58 pending_stack_adjust = 0;
61 /* Discard any pending stack adjustment. This avoid relying on the
62 RTL optimizers to remove useless adjustments when we know the
63 stack pointer value is dead. */
64 void
65 discard_pending_stack_adjust (void)
67 stack_pointer_delta -= pending_stack_adjust;
68 pending_stack_adjust = 0;
71 /* When exiting from function, if safe, clear out any pending stack adjust
72 so the adjustment won't get done.
74 Note, if the current function calls alloca, then it must have a
75 frame pointer regardless of the value of flag_omit_frame_pointer. */
77 void
78 clear_pending_stack_adjust (void)
80 if (optimize > 0
81 && (! flag_omit_frame_pointer || cfun->calls_alloca)
82 && EXIT_IGNORE_STACK)
83 discard_pending_stack_adjust ();
86 /* Pop any previously-pushed arguments that have not been popped yet. */
88 void
89 do_pending_stack_adjust (void)
91 if (inhibit_defer_pop == 0)
93 if (pending_stack_adjust != 0)
94 adjust_stack (GEN_INT (pending_stack_adjust));
95 pending_stack_adjust = 0;
99 /* Expand conditional expressions. */
101 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
102 LABEL is an rtx of code CODE_LABEL, in this function and all the
103 functions here. */
105 void
106 jumpifnot (tree exp, rtx label, int prob)
108 do_jump (exp, label, NULL_RTX, inv (prob));
111 void
112 jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
114 do_jump_1 (code, op0, op1, label, NULL_RTX, inv (prob));
117 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
119 void
120 jumpif (tree exp, rtx label, int prob)
122 do_jump (exp, NULL_RTX, label, prob);
125 void
126 jumpif_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
128 do_jump_1 (code, op0, op1, NULL_RTX, label, prob);
131 /* Used internally by prefer_and_bit_test. */
133 static GTY(()) rtx and_reg;
134 static GTY(()) rtx and_test;
135 static GTY(()) rtx shift_test;
137 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
138 where X is an arbitrary register of mode MODE. Return true if the former
139 is preferred. */
141 static bool
142 prefer_and_bit_test (enum machine_mode mode, int bitnum)
144 bool speed_p;
146 if (and_test == 0)
148 /* Set up rtxes for the two variations. Use NULL as a placeholder
149 for the BITNUM-based constants. */
150 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
151 and_test = gen_rtx_AND (mode, and_reg, NULL);
152 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
153 const1_rtx);
155 else
157 /* Change the mode of the previously-created rtxes. */
158 PUT_MODE (and_reg, mode);
159 PUT_MODE (and_test, mode);
160 PUT_MODE (shift_test, mode);
161 PUT_MODE (XEXP (shift_test, 0), mode);
164 /* Fill in the integers. */
165 XEXP (and_test, 1)
166 = immed_double_int_const (double_int_zero.set_bit (bitnum), mode);
167 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
169 speed_p = optimize_insn_for_speed_p ();
170 return (rtx_cost (and_test, IF_THEN_ELSE, 0, speed_p)
171 <= rtx_cost (shift_test, IF_THEN_ELSE, 0, speed_p));
174 /* Subroutine of do_jump, dealing with exploded comparisons of the type
175 OP0 CODE OP1 . IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
176 PROB is probability of jump to if_true_label, or -1 if unknown. */
178 void
179 do_jump_1 (enum tree_code code, tree op0, tree op1,
180 rtx if_false_label, rtx if_true_label, int prob)
182 enum machine_mode mode;
183 rtx drop_through_label = 0;
185 switch (code)
187 case EQ_EXPR:
189 tree inner_type = TREE_TYPE (op0);
191 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
192 != MODE_COMPLEX_FLOAT);
193 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
194 != MODE_COMPLEX_INT);
196 if (integer_zerop (op1))
197 do_jump (op0, if_true_label, if_false_label, inv (prob));
198 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
199 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
200 do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
201 prob);
202 else
203 do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
204 prob);
205 break;
208 case NE_EXPR:
210 tree inner_type = TREE_TYPE (op0);
212 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
213 != MODE_COMPLEX_FLOAT);
214 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
215 != MODE_COMPLEX_INT);
217 if (integer_zerop (op1))
218 do_jump (op0, if_false_label, if_true_label, prob);
219 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
220 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
221 do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
222 inv (prob));
223 else
224 do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
225 prob);
226 break;
229 case LT_EXPR:
230 mode = TYPE_MODE (TREE_TYPE (op0));
231 if (GET_MODE_CLASS (mode) == MODE_INT
232 && ! can_compare_p (LT, mode, ccp_jump))
233 do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
234 prob);
235 else
236 do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
237 prob);
238 break;
240 case LE_EXPR:
241 mode = TYPE_MODE (TREE_TYPE (op0));
242 if (GET_MODE_CLASS (mode) == MODE_INT
243 && ! can_compare_p (LE, mode, ccp_jump))
244 do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
245 inv (prob));
246 else
247 do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
248 prob);
249 break;
251 case GT_EXPR:
252 mode = TYPE_MODE (TREE_TYPE (op0));
253 if (GET_MODE_CLASS (mode) == MODE_INT
254 && ! can_compare_p (GT, mode, ccp_jump))
255 do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
256 prob);
257 else
258 do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
259 prob);
260 break;
262 case GE_EXPR:
263 mode = TYPE_MODE (TREE_TYPE (op0));
264 if (GET_MODE_CLASS (mode) == MODE_INT
265 && ! can_compare_p (GE, mode, ccp_jump))
266 do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
267 inv (prob));
268 else
269 do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
270 prob);
271 break;
273 case ORDERED_EXPR:
274 do_compare_and_jump (op0, op1, ORDERED, ORDERED,
275 if_false_label, if_true_label, prob);
276 break;
278 case UNORDERED_EXPR:
279 do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
280 if_false_label, if_true_label, prob);
281 break;
283 case UNLT_EXPR:
284 do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
285 prob);
286 break;
288 case UNLE_EXPR:
289 do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
290 prob);
291 break;
293 case UNGT_EXPR:
294 do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
295 prob);
296 break;
298 case UNGE_EXPR:
299 do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
300 prob);
301 break;
303 case UNEQ_EXPR:
304 do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
305 prob);
306 break;
308 case LTGT_EXPR:
309 do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
310 prob);
311 break;
313 case TRUTH_ANDIF_EXPR:
315 /* Spread the probability that the expression is false evenly between
316 the two conditions. So the first condition is false half the total
317 probability of being false. The second condition is false the other
318 half of the total probability of being false, so its jump has a false
319 probability of half the total, relative to the probability we
320 reached it (i.e. the first condition was true). */
321 int op0_prob = -1;
322 int op1_prob = -1;
323 if (prob != -1)
325 int false_prob = inv (prob);
326 int op0_false_prob = false_prob / 2;
327 int op1_false_prob = GCOV_COMPUTE_SCALE ((false_prob / 2),
328 inv (op0_false_prob));
329 /* Get the probability that each jump below is true. */
330 op0_prob = inv (op0_false_prob);
331 op1_prob = inv (op1_false_prob);
333 if (if_false_label == NULL_RTX)
335 drop_through_label = gen_label_rtx ();
336 do_jump (op0, drop_through_label, NULL_RTX, op0_prob);
337 do_jump (op1, NULL_RTX, if_true_label, op1_prob);
339 else
341 do_jump (op0, if_false_label, NULL_RTX, op0_prob);
342 do_jump (op1, if_false_label, if_true_label, op1_prob);
344 break;
347 case TRUTH_ORIF_EXPR:
349 /* Spread the probability evenly between the two conditions. So
350 the first condition has half the total probability of being true.
351 The second condition has the other half of the total probability,
352 so its jump has a probability of half the total, relative to
353 the probability we reached it (i.e. the first condition was false). */
354 int op0_prob = -1;
355 int op1_prob = -1;
356 if (prob != -1)
358 op0_prob = prob / 2;
359 op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
361 if (if_true_label == NULL_RTX)
363 drop_through_label = gen_label_rtx ();
364 do_jump (op0, NULL_RTX, drop_through_label, op0_prob);
365 do_jump (op1, if_false_label, NULL_RTX, op1_prob);
367 else
369 do_jump (op0, NULL_RTX, if_true_label, op0_prob);
370 do_jump (op1, if_false_label, if_true_label, op1_prob);
372 break;
375 default:
376 gcc_unreachable ();
379 if (drop_through_label)
381 do_pending_stack_adjust ();
382 emit_label (drop_through_label);
386 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
387 the result is zero, or IF_TRUE_LABEL if the result is one.
388 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
389 meaning fall through in that case.
391 do_jump always does any pending stack adjust except when it does not
392 actually perform a jump. An example where there is no jump
393 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
395 PROB is probability of jump to if_true_label, or -1 if unknown. */
397 void
398 do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
400 enum tree_code code = TREE_CODE (exp);
401 rtx temp;
402 int i;
403 tree type;
404 enum machine_mode mode;
405 rtx drop_through_label = 0;
407 switch (code)
409 case ERROR_MARK:
410 break;
412 case INTEGER_CST:
413 temp = integer_zerop (exp) ? if_false_label : if_true_label;
414 if (temp)
415 emit_jump (temp);
416 break;
418 #if 0
419 /* This is not true with #pragma weak */
420 case ADDR_EXPR:
421 /* The address of something can never be zero. */
422 if (if_true_label)
423 emit_jump (if_true_label);
424 break;
425 #endif
427 case NOP_EXPR:
428 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
429 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
430 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
431 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
432 goto normal;
433 case CONVERT_EXPR:
434 /* If we are narrowing the operand, we have to do the compare in the
435 narrower mode. */
436 if ((TYPE_PRECISION (TREE_TYPE (exp))
437 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
438 goto normal;
439 case NON_LVALUE_EXPR:
440 case ABS_EXPR:
441 case NEGATE_EXPR:
442 case LROTATE_EXPR:
443 case RROTATE_EXPR:
444 /* These cannot change zero->nonzero or vice versa. */
445 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
446 break;
448 case TRUTH_NOT_EXPR:
449 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
450 inv (prob));
451 break;
453 case COND_EXPR:
455 rtx label1 = gen_label_rtx ();
456 if (!if_true_label || !if_false_label)
458 drop_through_label = gen_label_rtx ();
459 if (!if_true_label)
460 if_true_label = drop_through_label;
461 if (!if_false_label)
462 if_false_label = drop_through_label;
465 do_pending_stack_adjust ();
466 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
467 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
468 emit_label (label1);
469 do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
470 break;
473 case COMPOUND_EXPR:
474 /* Lowered by gimplify.c. */
475 gcc_unreachable ();
477 case MINUS_EXPR:
478 /* Nonzero iff operands of minus differ. */
479 code = NE_EXPR;
481 /* FALLTHRU */
482 case EQ_EXPR:
483 case NE_EXPR:
484 case LT_EXPR:
485 case LE_EXPR:
486 case GT_EXPR:
487 case GE_EXPR:
488 case ORDERED_EXPR:
489 case UNORDERED_EXPR:
490 case UNLT_EXPR:
491 case UNLE_EXPR:
492 case UNGT_EXPR:
493 case UNGE_EXPR:
494 case UNEQ_EXPR:
495 case LTGT_EXPR:
496 case TRUTH_ANDIF_EXPR:
497 case TRUTH_ORIF_EXPR:
498 other_code:
499 do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
500 if_false_label, if_true_label, prob);
501 break;
503 case BIT_AND_EXPR:
504 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
505 See if the former is preferred for jump tests and restore it
506 if so. */
507 if (integer_onep (TREE_OPERAND (exp, 1)))
509 tree exp0 = TREE_OPERAND (exp, 0);
510 rtx set_label, clr_label;
511 int setclr_prob = prob;
513 /* Strip narrowing integral type conversions. */
514 while (CONVERT_EXPR_P (exp0)
515 && TREE_OPERAND (exp0, 0) != error_mark_node
516 && TYPE_PRECISION (TREE_TYPE (exp0))
517 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
518 exp0 = TREE_OPERAND (exp0, 0);
520 /* "exp0 ^ 1" inverts the sense of the single bit test. */
521 if (TREE_CODE (exp0) == BIT_XOR_EXPR
522 && integer_onep (TREE_OPERAND (exp0, 1)))
524 exp0 = TREE_OPERAND (exp0, 0);
525 clr_label = if_true_label;
526 set_label = if_false_label;
527 setclr_prob = inv (prob);
529 else
531 clr_label = if_false_label;
532 set_label = if_true_label;
535 if (TREE_CODE (exp0) == RSHIFT_EXPR)
537 tree arg = TREE_OPERAND (exp0, 0);
538 tree shift = TREE_OPERAND (exp0, 1);
539 tree argtype = TREE_TYPE (arg);
540 if (TREE_CODE (shift) == INTEGER_CST
541 && compare_tree_int (shift, 0) >= 0
542 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
543 && prefer_and_bit_test (TYPE_MODE (argtype),
544 TREE_INT_CST_LOW (shift)))
546 unsigned HOST_WIDE_INT mask
547 = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
548 do_jump (build2 (BIT_AND_EXPR, argtype, arg,
549 build_int_cstu (argtype, mask)),
550 clr_label, set_label, setclr_prob);
551 break;
556 /* If we are AND'ing with a small constant, do this comparison in the
557 smallest type that fits. If the machine doesn't have comparisons
558 that small, it will be converted back to the wider comparison.
559 This helps if we are testing the sign bit of a narrower object.
560 combine can't do this for us because it can't know whether a
561 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
563 if (! SLOW_BYTE_ACCESS
564 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
565 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
566 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
567 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
568 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
569 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
570 && have_insn_for (COMPARE, TYPE_MODE (type)))
572 do_jump (fold_convert (type, exp), if_false_label, if_true_label,
573 prob);
574 break;
577 if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
578 || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
579 goto normal;
581 /* Boolean comparisons can be compiled as TRUTH_AND_EXPR. */
583 case TRUTH_AND_EXPR:
584 /* High branch cost, expand as the bitwise AND of the conditions.
585 Do the same if the RHS has side effects, because we're effectively
586 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR. */
587 if (BRANCH_COST (optimize_insn_for_speed_p (),
588 false) >= 4
589 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
590 goto normal;
591 code = TRUTH_ANDIF_EXPR;
592 goto other_code;
594 case BIT_IOR_EXPR:
595 case TRUTH_OR_EXPR:
596 /* High branch cost, expand as the bitwise OR of the conditions.
597 Do the same if the RHS has side effects, because we're effectively
598 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR. */
599 if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
600 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
601 goto normal;
602 code = TRUTH_ORIF_EXPR;
603 goto other_code;
605 /* Fall through and generate the normal code. */
606 default:
607 normal:
608 temp = expand_normal (exp);
609 do_pending_stack_adjust ();
610 /* The RTL optimizers prefer comparisons against pseudos. */
611 if (GET_CODE (temp) == SUBREG)
613 /* Compare promoted variables in their promoted mode. */
614 if (SUBREG_PROMOTED_VAR_P (temp)
615 && REG_P (XEXP (temp, 0)))
616 temp = XEXP (temp, 0);
617 else
618 temp = copy_to_reg (temp);
620 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
621 NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
622 GET_MODE (temp), NULL_RTX,
623 if_false_label, if_true_label, prob);
626 if (drop_through_label)
628 do_pending_stack_adjust ();
629 emit_label (drop_through_label);
633 /* Compare OP0 with OP1, word at a time, in mode MODE.
634 UNSIGNEDP says to do unsigned comparison.
635 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
637 static void
638 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
639 rtx op1, rtx if_false_label, rtx if_true_label,
640 int prob)
642 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
643 rtx drop_through_label = 0;
644 bool drop_through_if_true = false, drop_through_if_false = false;
645 enum rtx_code code = GT;
646 int i;
648 if (! if_true_label || ! if_false_label)
649 drop_through_label = gen_label_rtx ();
650 if (! if_true_label)
652 if_true_label = drop_through_label;
653 drop_through_if_true = true;
655 if (! if_false_label)
657 if_false_label = drop_through_label;
658 drop_through_if_false = true;
661 /* Deal with the special case 0 > x: only one comparison is necessary and
662 we reverse it to avoid jumping to the drop-through label. */
663 if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
665 code = LE;
666 if_true_label = if_false_label;
667 if_false_label = drop_through_label;
668 drop_through_if_true = false;
669 drop_through_if_false = true;
672 /* Compare a word at a time, high order first. */
673 for (i = 0; i < nwords; i++)
675 rtx op0_word, op1_word;
677 if (WORDS_BIG_ENDIAN)
679 op0_word = operand_subword_force (op0, i, mode);
680 op1_word = operand_subword_force (op1, i, mode);
682 else
684 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
685 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
688 /* All but high-order word must be compared as unsigned. */
689 do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
690 word_mode, NULL_RTX, NULL_RTX, if_true_label,
691 prob);
693 /* Emit only one comparison for 0. Do not emit the last cond jump. */
694 if (op0 == const0_rtx || i == nwords - 1)
695 break;
697 /* Consider lower words only if these are equal. */
698 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
699 NULL_RTX, NULL_RTX, if_false_label, inv (prob));
702 if (!drop_through_if_false)
703 emit_jump (if_false_label);
704 if (drop_through_label)
705 emit_label (drop_through_label);
708 /* Given a comparison expression EXP for values too wide to be compared
709 with one insn, test the comparison and jump to the appropriate label.
710 The code of EXP is ignored; we always test GT if SWAP is 0,
711 and LT if SWAP is 1. */
713 static void
714 do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
715 rtx if_false_label, rtx if_true_label, int prob)
717 rtx op0 = expand_normal (swap ? treeop1 : treeop0);
718 rtx op1 = expand_normal (swap ? treeop0 : treeop1);
719 enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
720 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));
722 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
723 if_true_label, prob);
726 /* Jump according to whether OP0 is 0. We assume that OP0 has an integer
727 mode, MODE, that is too wide for the available compare insns. Either
728 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
729 to indicate drop through. */
731 static void
732 do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
733 rtx if_false_label, rtx if_true_label, int prob)
735 int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
736 rtx part;
737 int i;
738 rtx drop_through_label = 0;
740 /* The fastest way of doing this comparison on almost any machine is to
741 "or" all the words and compare the result. If all have to be loaded
742 from memory and this is a very wide item, it's possible this may
743 be slower, but that's highly unlikely. */
745 part = gen_reg_rtx (word_mode);
746 emit_move_insn (part, operand_subword_force (op0, 0, mode));
747 for (i = 1; i < nwords && part != 0; i++)
748 part = expand_binop (word_mode, ior_optab, part,
749 operand_subword_force (op0, i, mode),
750 part, 1, OPTAB_WIDEN);
752 if (part != 0)
754 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
755 NULL_RTX, if_false_label, if_true_label, prob);
756 return;
759 /* If we couldn't do the "or" simply, do this with a series of compares. */
760 if (! if_false_label)
761 drop_through_label = if_false_label = gen_label_rtx ();
763 for (i = 0; i < nwords; i++)
764 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
765 const0_rtx, EQ, 1, word_mode, NULL_RTX,
766 if_false_label, NULL_RTX, prob);
768 if (if_true_label)
769 emit_jump (if_true_label);
771 if (drop_through_label)
772 emit_label (drop_through_label);
775 /* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
776 where MODE is an integer mode too wide to be compared with one insn.
777 Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
778 to indicate drop through. */
780 static void
781 do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
782 rtx if_false_label, rtx if_true_label, int prob)
784 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
785 rtx drop_through_label = 0;
786 int i;
788 if (op1 == const0_rtx)
790 do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
791 prob);
792 return;
794 else if (op0 == const0_rtx)
796 do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
797 prob);
798 return;
801 if (! if_false_label)
802 drop_through_label = if_false_label = gen_label_rtx ();
804 for (i = 0; i < nwords; i++)
805 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
806 operand_subword_force (op1, i, mode),
807 EQ, 0, word_mode, NULL_RTX,
808 if_false_label, NULL_RTX, prob);
810 if (if_true_label)
811 emit_jump (if_true_label);
812 if (drop_through_label)
813 emit_label (drop_through_label);
816 /* Given an EQ_EXPR expression EXP for values too wide to be compared
817 with one insn, test the comparison and jump to the appropriate label. */
819 static void
820 do_jump_by_parts_equality (tree treeop0, tree treeop1, rtx if_false_label,
821 rtx if_true_label, int prob)
823 rtx op0 = expand_normal (treeop0);
824 rtx op1 = expand_normal (treeop1);
825 enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
826 do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
827 if_true_label, prob);
830 /* Split a comparison into two others, the second of which has the other
831 "orderedness". The first is always ORDERED or UNORDERED if MODE
832 does not honor NaNs (which means that it can be skipped in that case;
833 see do_compare_rtx_and_jump).
835 The two conditions are written in *CODE1 and *CODE2. Return true if
836 the conditions must be ANDed, false if they must be ORed. */
838 bool
839 split_comparison (enum rtx_code code, enum machine_mode mode,
840 enum rtx_code *code1, enum rtx_code *code2)
842 switch (code)
844 case LT:
845 *code1 = ORDERED;
846 *code2 = UNLT;
847 return true;
848 case LE:
849 *code1 = ORDERED;
850 *code2 = UNLE;
851 return true;
852 case GT:
853 *code1 = ORDERED;
854 *code2 = UNGT;
855 return true;
856 case GE:
857 *code1 = ORDERED;
858 *code2 = UNGE;
859 return true;
860 case EQ:
861 *code1 = ORDERED;
862 *code2 = UNEQ;
863 return true;
864 case NE:
865 *code1 = UNORDERED;
866 *code2 = LTGT;
867 return false;
868 case UNLT:
869 *code1 = UNORDERED;
870 *code2 = LT;
871 return false;
872 case UNLE:
873 *code1 = UNORDERED;
874 *code2 = LE;
875 return false;
876 case UNGT:
877 *code1 = UNORDERED;
878 *code2 = GT;
879 return false;
880 case UNGE:
881 *code1 = UNORDERED;
882 *code2 = GE;
883 return false;
884 case UNEQ:
885 *code1 = UNORDERED;
886 *code2 = EQ;
887 return false;
888 case LTGT:
889 /* Do not turn a trapping comparison into a non-trapping one. */
890 if (HONOR_SNANS (mode))
892 *code1 = LT;
893 *code2 = GT;
894 return false;
896 else
898 *code1 = ORDERED;
899 *code2 = NE;
900 return true;
902 default:
903 gcc_unreachable ();
908 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
909 The decision as to signed or unsigned comparison must be made by the caller.
911 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
912 compared. */
914 void
915 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
916 enum machine_mode mode, rtx size, rtx if_false_label,
917 rtx if_true_label, int prob)
919 rtx tem;
920 rtx dummy_label = NULL_RTX;
922 /* Reverse the comparison if that is safe and we want to jump if it is
923 false. Also convert to the reverse comparison if the target can
924 implement it. */
925 if ((! if_true_label
926 || ! can_compare_p (code, mode, ccp_jump))
927 && (! FLOAT_MODE_P (mode)
928 || code == ORDERED || code == UNORDERED
929 || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
930 || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
932 enum rtx_code rcode;
933 if (FLOAT_MODE_P (mode))
934 rcode = reverse_condition_maybe_unordered (code);
935 else
936 rcode = reverse_condition (code);
938 /* Canonicalize to UNORDERED for the libcall. */
939 if (can_compare_p (rcode, mode, ccp_jump)
940 || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
942 tem = if_true_label;
943 if_true_label = if_false_label;
944 if_false_label = tem;
945 code = rcode;
946 prob = inv (prob);
950 /* If one operand is constant, make it the second one. Only do this
951 if the other operand is not constant as well. */
953 if (swap_commutative_operands_p (op0, op1))
955 tem = op0;
956 op0 = op1;
957 op1 = tem;
958 code = swap_condition (code);
961 do_pending_stack_adjust ();
963 code = unsignedp ? unsigned_condition (code) : code;
964 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
965 op0, op1)))
967 if (CONSTANT_P (tem))
969 rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
970 ? if_false_label : if_true_label;
971 if (label)
972 emit_jump (label);
973 return;
976 code = GET_CODE (tem);
977 mode = GET_MODE (tem);
978 op0 = XEXP (tem, 0);
979 op1 = XEXP (tem, 1);
980 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
983 if (! if_true_label)
984 dummy_label = if_true_label = gen_label_rtx ();
986 if (GET_MODE_CLASS (mode) == MODE_INT
987 && ! can_compare_p (code, mode, ccp_jump))
989 switch (code)
991 case LTU:
992 do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
993 if_false_label, if_true_label, prob);
994 break;
996 case LEU:
997 do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
998 if_true_label, if_false_label,
999 inv (prob));
1000 break;
1002 case GTU:
1003 do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
1004 if_false_label, if_true_label, prob);
1005 break;
1007 case GEU:
1008 do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
1009 if_true_label, if_false_label,
1010 inv (prob));
1011 break;
1013 case LT:
1014 do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
1015 if_false_label, if_true_label, prob);
1016 break;
1018 case LE:
1019 do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
1020 if_true_label, if_false_label,
1021 inv (prob));
1022 break;
1024 case GT:
1025 do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
1026 if_false_label, if_true_label, prob);
1027 break;
1029 case GE:
1030 do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
1031 if_true_label, if_false_label,
1032 inv (prob));
1033 break;
1035 case EQ:
1036 do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
1037 if_true_label, prob);
1038 break;
1040 case NE:
1041 do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
1042 if_false_label, inv (prob));
1043 break;
1045 default:
1046 gcc_unreachable ();
1049 else
1051 if (SCALAR_FLOAT_MODE_P (mode)
1052 && ! can_compare_p (code, mode, ccp_jump)
1053 && can_compare_p (swap_condition (code), mode, ccp_jump))
1055 rtx tmp;
1056 code = swap_condition (code);
1057 tmp = op0;
1058 op0 = op1;
1059 op1 = tmp;
1061 else if (SCALAR_FLOAT_MODE_P (mode)
1062 && ! can_compare_p (code, mode, ccp_jump)
1063 /* Never split ORDERED and UNORDERED.
1064 These must be implemented. */
1065 && (code != ORDERED && code != UNORDERED)
1066 /* Split a floating-point comparison if
1067 we can jump on other conditions... */
1068 && (have_insn_for (COMPARE, mode)
1069 /* ... or if there is no libcall for it. */
1070 || code_to_optab (code) == unknown_optab))
1072 enum rtx_code first_code;
1073 bool and_them = split_comparison (code, mode, &first_code, &code);
1075 /* If there are no NaNs, the first comparison should always fall
1076 through. */
1077 if (!HONOR_NANS (mode))
1078 gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
1080 else
1082 if (and_them)
1084 rtx dest_label;
1085 /* If we only jump if true, just bypass the second jump. */
1086 if (! if_false_label)
1088 if (! dummy_label)
1089 dummy_label = gen_label_rtx ();
1090 dest_label = dummy_label;
1092 else
1093 dest_label = if_false_label;
1094 do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
1095 size, dest_label, NULL_RTX, prob);
1097 else
1098 do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
1099 size, NULL_RTX, if_true_label, prob);
1103 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
1104 if_true_label, prob);
1107 if (if_false_label)
1108 emit_jump (if_false_label);
1109 if (dummy_label)
1110 emit_label (dummy_label);
1113 /* Generate code for a comparison expression EXP (including code to compute
1114 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
1115 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
1116 generated code will drop through.
1117 SIGNED_CODE should be the rtx operation for this comparison for
1118 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
1120 We force a stack adjustment unless there are currently
1121 things pushed on the stack that aren't yet used. */
1123 static void
1124 do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
1125 enum rtx_code unsigned_code, rtx if_false_label,
1126 rtx if_true_label, int prob)
1128 rtx op0, op1;
1129 tree type;
1130 enum machine_mode mode;
1131 int unsignedp;
1132 enum rtx_code code;
1134 /* Don't crash if the comparison was erroneous. */
1135 op0 = expand_normal (treeop0);
1136 if (TREE_CODE (treeop0) == ERROR_MARK)
1137 return;
1139 op1 = expand_normal (treeop1);
1140 if (TREE_CODE (treeop1) == ERROR_MARK)
1141 return;
1143 type = TREE_TYPE (treeop0);
1144 mode = TYPE_MODE (type);
1145 if (TREE_CODE (treeop0) == INTEGER_CST
1146 && (TREE_CODE (treeop1) != INTEGER_CST
1147 || (GET_MODE_BITSIZE (mode)
1148 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
1150 /* op0 might have been replaced by promoted constant, in which
1151 case the type of second argument should be used. */
1152 type = TREE_TYPE (treeop1);
1153 mode = TYPE_MODE (type);
1155 unsignedp = TYPE_UNSIGNED (type);
1156 code = unsignedp ? unsigned_code : signed_code;
1158 #ifdef HAVE_canonicalize_funcptr_for_compare
1159 /* If function pointers need to be "canonicalized" before they can
1160 be reliably compared, then canonicalize them.
1161 Only do this if *both* sides of the comparison are function pointers.
1162 If one side isn't, we want a noncanonicalized comparison. See PR
1163 middle-end/17564. */
1164 if (HAVE_canonicalize_funcptr_for_compare
1165 && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
1166 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0)))
1167 == FUNCTION_TYPE
1168 && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
1169 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1)))
1170 == FUNCTION_TYPE)
1172 rtx new_op0 = gen_reg_rtx (mode);
1173 rtx new_op1 = gen_reg_rtx (mode);
1175 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
1176 op0 = new_op0;
1178 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
1179 op1 = new_op1;
1181 #endif
1183 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
1184 ((mode == BLKmode)
1185 ? expr_size (treeop0) : NULL_RTX),
1186 if_false_label, if_true_label, prob);
1189 #include "gt-dojump.h"