/* gcc/dojump.c */
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "stor-layout.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
#include "predict.h"
#include "basic-block.h"
#include "tm_p.h"
static bool prefer_and_bit_test (machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx, int);
/* Invert probability if there is any.  -1 stands for unknown.  */

static inline int
inv (int prob)
{
  return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
}
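
/* Illustrative example (not in the original source): with the usual
   REG_BR_PROB_BASE of 10000, a branch predicted taken with probability
   9000 (90%) inverts to 10000 - 9000 = 1000 (10%), while an unknown
   probability of -1 stays -1.  */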
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}
/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}
/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Remember pending_stack_adjust/stack_pointer_delta.
   To be used around code that may call do_pending_stack_adjust (),
   but the generated code could be discarded e.g. using delete_insns_since.  */

void
save_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  save->x_pending_stack_adjust = pending_stack_adjust;
  save->x_stack_pointer_delta = stack_pointer_delta;
}
/* Restore the saved pending_stack_adjust/stack_pointer_delta.  */

void
restore_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  if (inhibit_defer_pop == 0)
    {
      pending_stack_adjust = save->x_pending_stack_adjust;
      stack_pointer_delta = save->x_stack_pointer_delta;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label, int prob)
{
  do_jump (exp, label, NULL_RTX, inv (prob));
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, label, NULL_RTX, inv (prob));
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label, int prob)
{
  do_jump (exp, NULL_RTX, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, NULL_RTX, label, prob);
}
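
/* Usage sketch (illustrative, not part of the original file): an expander
   that wants "if (a < b) goto slow_path;" for trees A and B of the same
   integral type could write

     rtx slow_path = gen_label_rtx ();
     jumpif_1 (LT_EXPR, a, b, slow_path, -1);

   passing -1 because no branch probability is known.  jumpifnot_1 would
   branch on the opposite outcome instead.  */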
/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (machine_mode mode, int bitnum)
{
  bool speed_p;
  wide_int mask = wi::set_bit_in_zero (bitnum, GET_MODE_PRECISION (mode));

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = immed_wide_int_const (mask, mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, IF_THEN_ELSE, 0, speed_p)
          <= rtx_cost (shift_test, IF_THEN_ELSE, 0, speed_p));
}
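
/* Illustrative example (not in the original source): for MODE == SImode and
   BITNUM == 3, the two rtxes whose costs are compared look roughly like

     and_test:   (and:SI (reg:SI <pseudo>) (const_int 8))
     shift_test: (and:SI (ashiftrt:SI (reg:SI <pseudo>) (const_int 3))
                         (const_int 1))

   i.e. "x & (1 << 3)" versus "(x >> 3) & 1".  The cached rtxes are reused
   across calls; only their mode and constants are updated.  */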
/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1.  IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
           rtx if_false_label, rtx if_true_label, int prob)
{
  machine_mode mode;
  rtx_code_label *drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
                                     prob);
        else
          do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
                               prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
                                     inv (prob));
        else
          do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
                               prob);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
                             prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
                             prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
                             prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
                             prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
                           prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
                           prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
                           prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
                           prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
                           prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
                           prob);
      break;

    case TRUTH_ANDIF_EXPR:
      {
        /* Spread the probability that the expression is false evenly between
           the two conditions.  So the first condition is false half the total
           probability of being false.  The second condition is false the other
           half of the total probability of being false, so its jump has a
           false probability of half the total, relative to the probability we
           reached it (i.e. the first condition was true).  A worked numeric
           example follows this function.  */
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            int false_prob = inv (prob);
            int op0_false_prob = false_prob / 2;
            int op1_false_prob = GCOV_COMPUTE_SCALE ((false_prob / 2),
                                                     inv (op0_false_prob));
            /* Get the probability that each jump below is true.  */
            op0_prob = inv (op0_false_prob);
            op1_prob = inv (op1_false_prob);
          }
        if (if_false_label == NULL_RTX)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, drop_through_label, NULL_RTX, op0_prob);
            do_jump (op1, NULL_RTX, if_true_label, op1_prob);
          }
        else
          {
            do_jump (op0, if_false_label, NULL_RTX, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    case TRUTH_ORIF_EXPR:
      {
        /* Spread the probability evenly between the two conditions.  So
           the first condition has half the total probability of being true.
           The second condition has the other half of the total probability,
           so its jump has a probability of half the total, relative to
           the probability we reached it (i.e. the first condition was
           false).  */
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            op0_prob = prob / 2;
            op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
          }
        if (if_true_label == NULL_RTX)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, NULL_RTX, drop_through_label, op0_prob);
            do_jump (op1, if_false_label, NULL_RTX, op1_prob);
          }
        else
          {
            do_jump (op0, NULL_RTX, if_true_label, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
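
/* Worked example for the TRUTH_ANDIF_EXPR probability splitting above
   (illustrative, not in the original source; assumes the usual
   REG_BR_PROB_BASE of 10000):

   For "a && b" with an overall probability of 8000 (80%) of being true,
   the expression is false with probability 2000.  Half of that, 1000, is
   charged to the first condition, so the jump on "a" is true with
   op0_prob = inv (1000) = 9000.  The remaining 1000 of "false" weight is
   rescaled relative to the 9000 cases that reach the second test:
   GCOV_COMPUTE_SCALE (1000, 9000) is roughly 1111, giving
   op1_prob = inv (1111) = 8889 for the jump on "b".  */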
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  machine_mode mode;
  rtx_code_label *drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               inv (prob));
      break;

    case COND_EXPR:
      {
        rtx_code_label *label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;
          int setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = inv (prob);
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
                       false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label,
                              int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }

  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
    }

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
                               word_mode, NULL_RTX, NULL_RTX, if_true_label,
                               prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
        break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label, inv (prob));
    }

  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
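
/* Illustrative example (not in the original source): on a 32-bit target,
   testing A > B for two DImode (64-bit) values with this routine emits
   roughly

     jump to IF_TRUE_LABEL  if high word of A >  high word of B
     jump to IF_FALSE_LABEL if high word of A != high word of B
     jump to IF_TRUE_LABEL  if low word of A  >  low word of B  (unsigned)
     jump to IF_FALSE_LABEL otherwise

   Only the most significant word is compared with the requested signedness;
   every lower word is always compared as unsigned.  */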
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
                          rtx if_false_label, rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label, prob);
}
/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
                                 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
                                 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1, rtx if_false_label,
                           rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label, prob);
}
/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */

bool
split_comparison (enum rtx_code code, machine_mode mode,
                  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
    case LT:
      *code1 = ORDERED;
      *code2 = UNLT;
      return true;
    case LE:
      *code1 = ORDERED;
      *code2 = UNLE;
      return true;
    case GT:
      *code1 = ORDERED;
      *code2 = UNGT;
      return true;
    case GE:
      *code1 = ORDERED;
      *code2 = UNGE;
      return true;
    case EQ:
      *code1 = ORDERED;
      *code2 = UNEQ;
      return true;
    case NE:
      *code1 = UNORDERED;
      *code2 = LTGT;
      return false;
    case UNLT:
      *code1 = UNORDERED;
      *code2 = LT;
      return false;
    case UNLE:
      *code1 = UNORDERED;
      *code2 = LE;
      return false;
    case UNGT:
      *code1 = UNORDERED;
      *code2 = GT;
      return false;
    case UNGE:
      *code1 = UNORDERED;
      *code2 = GE;
      return false;
    case UNEQ:
      *code1 = UNORDERED;
      *code2 = EQ;
      return false;
    case LTGT:
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
        {
          *code1 = LT;
          *code2 = GT;
          return false;
        }
      else
        {
          *code1 = ORDERED;
          *code2 = NE;
          return true;
        }
    default:
      gcc_unreachable ();
    }
}
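
/* Illustrative examples (not in the original source): splitting LT yields
   *CODE1 = ORDERED and *CODE2 = UNLT with a true return value, i.e.
   "a < b" becomes "ordered (a, b) && unlt (a, b)".  Splitting NE yields
   *CODE1 = UNORDERED and *CODE2 = LTGT with a false return value, i.e.
   "a != b" becomes "unordered (a, b) || ltgt (a, b)".  When the mode does
   not honor NaNs, the ORDERED/UNORDERED half is trivially true or false,
   so the caller can drop that first comparison entirely.  */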
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          rtx tmp;
          code = swap_condition (code);
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
          else
            {
              int first_prob = prob;
              if (first_code == UNORDERED)
                first_prob = REG_BR_PROB_BASE / 100;
              else if (first_code == ORDERED)
                first_prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100;
              if (and_them)
                {
                  rtx dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL_RTX,
                                           first_prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                         size, NULL_RTX, if_true_label,
                                         first_prob);
            }
        }

      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}
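
/* Illustrative note (not in the original source): the reversal logic at the
   top of do_compare_rtx_and_jump means that a request like "jump to
   IF_FALSE_LABEL when the comparison fails" with no IF_TRUE_LABEL can often
   be emitted as a single branch on the reversed condition, with the
   probability inverted via inv (), instead of a conditional branch over an
   unconditional jump.  For floating-point modes the reversal is attempted
   only when NaNs cannot change its meaning, e.g. EQ/NE when signaling NaNs
   need not be honored, in which case reverse_condition_maybe_unordered
   supplies the reversed code.  */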
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0)))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1)))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
                           if_false_label, if_true_label, prob);
}

#include "gt-dojump.h"