/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
#include "basic-block.h"
#include "tm_p.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx, int);

/* Invert probability if there is any.  -1 stands for unknown.  */

static inline int
inv (int prob)
{
  return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
}

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label, int prob)
{
  do_jump (exp, label, NULL_RTX, inv (prob));
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, label, NULL_RTX, inv (prob));
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label, int prob)
{
  do_jump (exp, NULL_RTX, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, NULL_RTX, label, prob);
}

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  bool speed_p;

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1)
    = immed_double_int_const (double_int_zero.set_bit (bitnum), mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
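
  /* Compare the costs of the two forms in the context of a conditional
     jump (IF_THEN_ELSE), using speed or size costs depending on how the
     current block is being optimized.  */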
  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, IF_THEN_ELSE, 0, speed_p)
          <= rtx_cost (shift_test, IF_THEN_ELSE, 0, speed_p));
}

/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1.  IF_FALSE_LABEL and IF_TRUE_LABEL are used as in do_jump.
   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
           rtx if_false_label, rtx if_true_label, int prob)
{
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
                                     prob);
        else
          do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
                               prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
                                     inv (prob));
        else
          do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
                               prob);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
                             prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
                             prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
                                  prob);
      else
        do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
                             prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
                                  inv (prob));
      else
        do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
                             prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
                           prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
                           prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
                           prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
                           prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
                           prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
                           prob);
      break;

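    /* For the short-circuit logical operators, when one of the labels is
       missing we create a local drop-through label so that the second
       operand is only evaluated when it can still affect the outcome.  */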
    case TRUTH_ANDIF_EXPR:
      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (op0, drop_through_label, NULL_RTX, prob);
          do_jump (op1, NULL_RTX, if_true_label, prob);
        }
      else
        {
          do_jump (op0, if_false_label, NULL_RTX, prob);
          do_jump (op1, if_false_label, if_true_label, prob);
        }
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (op0, NULL_RTX, drop_through_label, prob);
          do_jump (op1, if_false_label, NULL_RTX, prob);
        }
      else
        {
          do_jump (op0, NULL_RTX, if_true_label, prob);
          do_jump (op1, if_false_label, if_true_label, prob);
        }
      break;

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               inv (prob));
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;
          int setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = inv (prob);
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
                       false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label,
                              int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;
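
  /* If either label is missing, substitute a locally generated drop-through
     label so that the corresponding outcome simply falls through.  */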
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }

  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
    }

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
                               word_mode, NULL_RTX, NULL_RTX, if_true_label,
                               prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
        break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label, inv (prob));
    }

  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
                          rtx if_false_label, rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label, prob);
}

/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
                                 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
                                 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1, rtx if_false_label,
                           rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label, prob);
}

/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */

bool
split_comparison (enum rtx_code code, enum machine_mode mode,
                  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
    case LT:
      *code1 = ORDERED;
      *code2 = UNLT;
      return true;
    case LE:
      *code1 = ORDERED;
      *code2 = UNLE;
      return true;
    case GT:
      *code1 = ORDERED;
      *code2 = UNGT;
      return true;
    case GE:
      *code1 = ORDERED;
      *code2 = UNGE;
      return true;
    case EQ:
      *code1 = ORDERED;
      *code2 = UNEQ;
      return true;
    case NE:
      *code1 = UNORDERED;
      *code2 = LTGT;
      return false;
    case UNLT:
      *code1 = UNORDERED;
      *code2 = LT;
      return false;
    case UNLE:
      *code1 = UNORDERED;
      *code2 = LE;
      return false;
    case UNGT:
      *code1 = UNORDERED;
      *code2 = GT;
      return false;
    case UNGE:
      *code1 = UNORDERED;
      *code2 = GE;
      return false;
    case UNEQ:
      *code1 = UNORDERED;
      *code2 = EQ;
      return false;
    case LTGT:
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
        {
          *code1 = LT;
          *code2 = GT;
          return false;
        }
      else
        {
          *code1 = ORDERED;
          *code2 = NE;
          return true;
        }
    default:
      gcc_unreachable ();
    }
}
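
/* Example of split_comparison: on a mode that honors NaNs, LT splits into
   ORDERED and UNLT with the two conditions ANDed, while UNGE splits into
   UNORDERED and GE with the two conditions ORed.  */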

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL_RTX;
  rtx last;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
          code = rcode;
          prob = inv (prob);
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();
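
  /* If the operands are unsigned, use the unsigned form of the comparison
     code, then try to simplify the comparison; a constant result decides
     the branch at compile time.  */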
  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }
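
  /* If no true label was supplied, create one locally; it is emitted just
     before returning, so a true comparison simply falls through.  */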
  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        inv (prob));
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label, inv (prob));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          rtx tmp;
          code = swap_condition (code);
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));

          else
            {
              if (and_them)
                {
                  rtx dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                           size, dest_label, NULL_RTX, prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                         size, NULL_RTX, if_true_label, prob);
            }
        }

      last = get_last_insn ();
      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label);
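
      /* If we know the branch probability, attach it as a REG_BR_PROB note
         to the conditional jump that emit_cmp_and_jump_insns just emitted,
         provided that jump is the last insn generated.  */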
      if (prob != -1 && profile_status != PROFILE_ABSENT)
        {
          for (last = NEXT_INSN (last);
               last && NEXT_INSN (last);
               last = NEXT_INSN (last))
            if (JUMP_P (last))
              break;
          if (last
              && JUMP_P (last)
              && ! NEXT_INSN (last)
              && any_condjump_p (last))
            {
              gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
              add_reg_note (last, REG_BR_PROB, GEN_INT (prob));
            }
        }
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
         case the type of the second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0)))
           == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1)))
           == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
                           if_false_label, if_true_label, prob);
}

#include "gt-dojump.h"