/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}

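/* Usage sketch (not code from this file): a caller expanding
   `if (cond) body;' would typically do

       rtx else_label = gen_label_rtx ();
       jumpifnot (cond, else_label);
       ... expand BODY ...
       emit_label (else_label);

   so that BODY is skipped whenever COND evaluates to zero.  */
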
/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1"
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  return (rtx_cost (and_test, IF_THEN_ELSE)
          <= rtx_cost (shift_test, IF_THEN_ELSE));
}

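/* For example, with MODE == SImode and BITNUM == 3, the two rtxes whose
   costs are compared above are

       (and:SI (reg:SI) (const_int 8))

   for "X & (1 << 3)" and

       (and:SI (ashiftrt:SI (reg:SI) (const_int 3)) (const_int 1))

   for "(X >> 3) & 1".  */
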
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case UNSAVE_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      TREE_OPERAND (exp, 0)
        = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
      break;

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
         operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
                                  TREE_OPERAND (exp, 0),
                                  TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  tree t = build_int_2 (mask, 0);
                  TREE_TYPE (t) = argtype;
                  do_jump (build (BIT_AND_EXPR, argtype, arg, t),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep);

        type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

      {
        enum rtx_code rcode1;
        enum tree_code tcode2;

        case UNLT_EXPR:
          rcode1 = UNLT;
          tcode2 = LT_EXPR;
          goto unordered_bcc;
        case UNLE_EXPR:
          rcode1 = UNLE;
          tcode2 = LE_EXPR;
          goto unordered_bcc;
        case UNGT_EXPR:
          rcode1 = UNGT;
          tcode2 = GT_EXPR;
          goto unordered_bcc;
        case UNGE_EXPR:
          rcode1 = UNGE;
          tcode2 = GE_EXPR;
          goto unordered_bcc;
        case UNEQ_EXPR:
          rcode1 = UNEQ;
          tcode2 = EQ_EXPR;
          goto unordered_bcc;

        unordered_bcc:
          mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
          if (can_compare_p (rcode1, mode, ccp_jump))
            do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                                 if_true_label);
          else
            {
              tree op0 = save_expr (TREE_OPERAND (exp, 0));
              tree op1 = save_expr (TREE_OPERAND (exp, 1));
              tree cmp0, cmp1;

              /* If the target doesn't support combined unordered
                 compares, decompose into UNORDERED + comparison.  */
              cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
              cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
              exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
              do_jump (exp, if_false_label, if_true_label);
            }
      }
      break;

      /* Special case:
         __builtin_expect (<test>, 0)  and
         __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
      /* Fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        {
          /* The RTL optimizers prefer comparisons against pseudos.  */
          if (GET_CODE (temp) == SUBREG)
            {
              /* Compare promoted variables in their promoted mode.  */
              if (SUBREG_PROMOTED_VAR_P (temp)
                  && GET_CODE (XEXP (temp, 0)) == REG)
                temp = XEXP (temp, 0);
              else
                temp = copy_to_reg (temp);
            }
          do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                   NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   GET_MODE (temp), NULL_RTX,
                                   if_false_label, if_true_label);
        }
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

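/* As an illustration of the TRUTH_ANDIF_EXPR case above, `a && b' with a
   null IF_FALSE_LABEL expands roughly to

       do_jump (a) -- jump to DROP_THROUGH_LABEL if A is zero
       do_jump (b) -- jump to DROP_THROUGH_LABEL if B is zero,
                      else to IF_TRUE_LABEL
     DROP_THROUGH_LABEL:

   so B is never evaluated when A is zero; TRUTH_ORIF_EXPR is the mirror
   image with the label senses swapped.  */
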
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

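/* For instance, a DImode comparison on a 32-bit target (nwords == 2)
   emits, in outline and high-order word first:

       if (op0.high >  op1.high)  goto if_true_label;
       if (op0.high != op1.high)  goto if_false_label;
       if (op0.low  >  op1.low)   goto if_true_label;   (unsigned compare)
       goto if_false_label;

   Lower-order words are only examined when all higher words are equal.  */
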
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

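/* E.g. for a DImode OP0 on a 32-bit target the fast path above computes

       part = word0 | word1;
       if (part == 0) goto if_true_label; else goto if_false_label;

   using a single word_mode comparison instead of one compare per word.  */
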
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}

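/* The value returned by compare_from_rtx is suitable as the condition of
   a conditional branch: for example

       cond = compare_from_rtx (x, y, LT, 0, SImode, NULL_RTX);

   yields (lt (cc0) (const_int 0)) on CC0 targets and (lt x y) elsewhere,
   unless simplify_relational_operation folded the comparison to a
   constant first.  */
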
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

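/* Note that the reversal step at the top of do_compare_rtx_and_jump
   (jumping on the reversed condition when only IF_FALSE_LABEL is given)
   is applied only for non-floating modes: with IEEE NaNs, `!(a < b)' is
   not equivalent to `a >= b', so reverse_condition would be unsafe
   there.  */
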
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

#include "gt-dojump.h"