/* Source: official-gcc.git, gcc/dojump.c
   (blob 2ed014bacf93b687cc4ac6d3115393147f0df18b).
   NOTE(review): the stray ChangeLog fragment pulled in by the web viewer
   has been folded into this comment so the file header is valid C.  */
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
37 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
38 static void do_jump_by_parts_equality (tree, rtx, rtx);
39 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
40 rtx);
42 /* At the start of a function, record that we have no previously-pushed
43 arguments waiting to be popped. */
45 void
46 init_pending_stack_adjust (void)
48 pending_stack_adjust = 0;
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      /* Discard the adjustment, keeping the bookkeeping in
	 stack_pointer_delta consistent.  */
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}
73 /* Pop any previously-pushed arguments that have not been popped yet. */
75 void
76 do_pending_stack_adjust (void)
78 if (inhibit_defer_pop == 0)
80 if (pending_stack_adjust != 0)
81 adjust_stack (GEN_INT (pending_stack_adjust));
82 pending_stack_adjust = 0;
86 /* Expand conditional expressions. */
88 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
89 LABEL is an rtx of code CODE_LABEL, in this function and all the
90 functions here. */
92 void
93 jumpifnot (tree exp, rtx label)
95 do_jump (exp, label, NULL_RTX);
98 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
100 void
101 jumpif (tree exp, rtx label)
103 do_jump (exp, NULL_RTX, label);
106 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
107 the result is zero, or IF_TRUE_LABEL if the result is one.
108 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
109 meaning fall through in that case.
111 do_jump always does any pending stack adjust except when it does not
112 actually perform a jump. An example where there is no jump
113 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
115 This function is responsible for optimizing cases such as
116 &&, || and comparison operators in EXP. */
118 void
119 do_jump (tree exp, rtx if_false_label, rtx if_true_label)
121 enum tree_code code = TREE_CODE (exp);
122 /* Some cases need to create a label to jump to
123 in order to properly fall through.
124 These cases set DROP_THROUGH_LABEL nonzero. */
125 rtx drop_through_label = 0;
126 rtx temp;
127 int i;
128 tree type;
129 enum machine_mode mode;
131 #ifdef MAX_INTEGER_COMPUTATION_MODE
132 check_max_integer_computation_mode (exp);
133 #endif
135 emit_queue ();
137 switch (code)
139 case ERROR_MARK:
140 break;
142 case INTEGER_CST:
143 temp = integer_zerop (exp) ? if_false_label : if_true_label;
144 if (temp)
145 emit_jump (temp);
146 break;
148 #if 0
149 /* This is not true with #pragma weak */
150 case ADDR_EXPR:
151 /* The address of something can never be zero. */
152 if (if_true_label)
153 emit_jump (if_true_label);
154 break;
155 #endif
157 case UNSAVE_EXPR:
158 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
159 TREE_OPERAND (exp, 0)
160 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
161 break;
163 case NOP_EXPR:
164 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
165 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
166 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
167 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
168 goto normal;
169 case CONVERT_EXPR:
170 /* If we are narrowing the operand, we have to do the compare in the
171 narrower mode. */
172 if ((TYPE_PRECISION (TREE_TYPE (exp))
173 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
174 goto normal;
175 case NON_LVALUE_EXPR:
176 case REFERENCE_EXPR:
177 case ABS_EXPR:
178 case NEGATE_EXPR:
179 case LROTATE_EXPR:
180 case RROTATE_EXPR:
181 /* These cannot change zero->nonzero or vice versa. */
182 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
183 break;
185 case WITH_RECORD_EXPR:
186 /* Put the object on the placeholder list, recurse through our first
187 operand, and pop the list. */
188 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
189 placeholder_list);
190 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
191 placeholder_list = TREE_CHAIN (placeholder_list);
192 break;
194 #if 0
195 /* This is never less insns than evaluating the PLUS_EXPR followed by
196 a test and can be longer if the test is eliminated. */
197 case PLUS_EXPR:
198 /* Reduce to minus. */
199 exp = build (MINUS_EXPR, TREE_TYPE (exp),
200 TREE_OPERAND (exp, 0),
201 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
202 TREE_OPERAND (exp, 1))));
203 /* Process as MINUS. */
204 #endif
206 case MINUS_EXPR:
207 /* Nonzero iff operands of minus differ. */
208 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
209 TREE_OPERAND (exp, 0),
210 TREE_OPERAND (exp, 1)),
211 NE, NE, if_false_label, if_true_label);
212 break;
214 case BIT_AND_EXPR:
215 /* If we are AND'ing with a small constant, do this comparison in the
216 smallest type that fits. If the machine doesn't have comparisons
217 that small, it will be converted back to the wider comparison.
218 This helps if we are testing the sign bit of a narrower object.
219 combine can't do this for us because it can't know whether a
220 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
222 if (! SLOW_BYTE_ACCESS
223 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
224 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
225 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
226 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
227 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
228 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
229 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
230 != CODE_FOR_nothing))
232 do_jump (convert (type, exp), if_false_label, if_true_label);
233 break;
235 goto normal;
237 case TRUTH_NOT_EXPR:
238 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
239 break;
241 case TRUTH_ANDIF_EXPR:
242 if (if_false_label == 0)
243 if_false_label = drop_through_label = gen_label_rtx ();
244 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
245 start_cleanup_deferral ();
246 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
247 end_cleanup_deferral ();
248 break;
250 case TRUTH_ORIF_EXPR:
251 if (if_true_label == 0)
252 if_true_label = drop_through_label = gen_label_rtx ();
253 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
254 start_cleanup_deferral ();
255 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
256 end_cleanup_deferral ();
257 break;
259 case COMPOUND_EXPR:
260 push_temp_slots ();
261 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
262 preserve_temp_slots (NULL_RTX);
263 free_temp_slots ();
264 pop_temp_slots ();
265 emit_queue ();
266 do_pending_stack_adjust ();
267 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
268 break;
270 case COMPONENT_REF:
271 case BIT_FIELD_REF:
272 case ARRAY_REF:
273 case ARRAY_RANGE_REF:
275 HOST_WIDE_INT bitsize, bitpos;
276 int unsignedp;
277 enum machine_mode mode;
278 tree type;
279 tree offset;
280 int volatilep = 0;
282 /* Get description of this reference. We don't actually care
283 about the underlying object here. */
284 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
285 &unsignedp, &volatilep);
287 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
288 if (! SLOW_BYTE_ACCESS
289 && type != 0 && bitsize >= 0
290 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
291 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
292 != CODE_FOR_nothing))
294 do_jump (convert (type, exp), if_false_label, if_true_label);
295 break;
297 goto normal;
300 case COND_EXPR:
301 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
302 if (integer_onep (TREE_OPERAND (exp, 1))
303 && integer_zerop (TREE_OPERAND (exp, 2)))
304 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
306 else if (integer_zerop (TREE_OPERAND (exp, 1))
307 && integer_onep (TREE_OPERAND (exp, 2)))
308 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
310 else
312 rtx label1 = gen_label_rtx ();
313 drop_through_label = gen_label_rtx ();
315 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
317 start_cleanup_deferral ();
318 /* Now the THEN-expression. */
319 do_jump (TREE_OPERAND (exp, 1),
320 if_false_label ? if_false_label : drop_through_label,
321 if_true_label ? if_true_label : drop_through_label);
322 /* In case the do_jump just above never jumps. */
323 do_pending_stack_adjust ();
324 emit_label (label1);
326 /* Now the ELSE-expression. */
327 do_jump (TREE_OPERAND (exp, 2),
328 if_false_label ? if_false_label : drop_through_label,
329 if_true_label ? if_true_label : drop_through_label);
330 end_cleanup_deferral ();
332 break;
334 case EQ_EXPR:
336 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
338 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
339 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
341 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
342 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
343 do_jump
344 (fold
345 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
346 fold (build (EQ_EXPR, TREE_TYPE (exp),
347 fold (build1 (REALPART_EXPR,
348 TREE_TYPE (inner_type),
349 exp0)),
350 fold (build1 (REALPART_EXPR,
351 TREE_TYPE (inner_type),
352 exp1)))),
353 fold (build (EQ_EXPR, TREE_TYPE (exp),
354 fold (build1 (IMAGPART_EXPR,
355 TREE_TYPE (inner_type),
356 exp0)),
357 fold (build1 (IMAGPART_EXPR,
358 TREE_TYPE (inner_type),
359 exp1)))))),
360 if_false_label, if_true_label);
363 else if (integer_zerop (TREE_OPERAND (exp, 1)))
364 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
366 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
367 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
368 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
369 else
370 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
371 break;
374 case NE_EXPR:
376 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
378 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
379 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
381 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
382 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
383 do_jump
384 (fold
385 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
386 fold (build (NE_EXPR, TREE_TYPE (exp),
387 fold (build1 (REALPART_EXPR,
388 TREE_TYPE (inner_type),
389 exp0)),
390 fold (build1 (REALPART_EXPR,
391 TREE_TYPE (inner_type),
392 exp1)))),
393 fold (build (NE_EXPR, TREE_TYPE (exp),
394 fold (build1 (IMAGPART_EXPR,
395 TREE_TYPE (inner_type),
396 exp0)),
397 fold (build1 (IMAGPART_EXPR,
398 TREE_TYPE (inner_type),
399 exp1)))))),
400 if_false_label, if_true_label);
403 else if (integer_zerop (TREE_OPERAND (exp, 1)))
404 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
406 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
407 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
408 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
409 else
410 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
411 break;
414 case LT_EXPR:
415 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
416 if (GET_MODE_CLASS (mode) == MODE_INT
417 && ! can_compare_p (LT, mode, ccp_jump))
418 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
419 else
420 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
421 break;
423 case LE_EXPR:
424 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
425 if (GET_MODE_CLASS (mode) == MODE_INT
426 && ! can_compare_p (LE, mode, ccp_jump))
427 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
428 else
429 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
430 break;
432 case GT_EXPR:
433 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
434 if (GET_MODE_CLASS (mode) == MODE_INT
435 && ! can_compare_p (GT, mode, ccp_jump))
436 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
437 else
438 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
439 break;
441 case GE_EXPR:
442 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
443 if (GET_MODE_CLASS (mode) == MODE_INT
444 && ! can_compare_p (GE, mode, ccp_jump))
445 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
446 else
447 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
448 break;
450 case UNORDERED_EXPR:
451 case ORDERED_EXPR:
453 enum rtx_code cmp, rcmp;
454 int do_rev;
456 if (code == UNORDERED_EXPR)
457 cmp = UNORDERED, rcmp = ORDERED;
458 else
459 cmp = ORDERED, rcmp = UNORDERED;
460 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
462 do_rev = 0;
463 if (! can_compare_p (cmp, mode, ccp_jump)
464 && (can_compare_p (rcmp, mode, ccp_jump)
465 /* If the target doesn't provide either UNORDERED or ORDERED
466 comparisons, canonicalize on UNORDERED for the library. */
467 || rcmp == UNORDERED))
468 do_rev = 1;
470 if (! do_rev)
471 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
472 else
473 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
475 break;
478 enum rtx_code rcode1;
479 enum tree_code tcode2;
481 case UNLT_EXPR:
482 rcode1 = UNLT;
483 tcode2 = LT_EXPR;
484 goto unordered_bcc;
485 case UNLE_EXPR:
486 rcode1 = UNLE;
487 tcode2 = LE_EXPR;
488 goto unordered_bcc;
489 case UNGT_EXPR:
490 rcode1 = UNGT;
491 tcode2 = GT_EXPR;
492 goto unordered_bcc;
493 case UNGE_EXPR:
494 rcode1 = UNGE;
495 tcode2 = GE_EXPR;
496 goto unordered_bcc;
497 case UNEQ_EXPR:
498 rcode1 = UNEQ;
499 tcode2 = EQ_EXPR;
500 goto unordered_bcc;
502 unordered_bcc:
503 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
504 if (can_compare_p (rcode1, mode, ccp_jump))
505 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
506 if_true_label);
507 else
509 tree op0 = save_expr (TREE_OPERAND (exp, 0));
510 tree op1 = save_expr (TREE_OPERAND (exp, 1));
511 tree cmp0, cmp1;
513 /* If the target doesn't support combined unordered
514 compares, decompose into UNORDERED + comparison. */
515 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
516 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
517 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
518 do_jump (exp, if_false_label, if_true_label);
521 break;
523 /* Special case:
524 __builtin_expect (<test>, 0) and
525 __builtin_expect (<test>, 1)
527 We need to do this here, so that <test> is not converted to a SCC
528 operation on machines that use condition code registers and COMPARE
529 like the PowerPC, and then the jump is done based on whether the SCC
530 operation produced a 1 or 0. */
531 case CALL_EXPR:
532 /* Check for a built-in function. */
534 tree fndecl = get_callee_fndecl (exp);
535 tree arglist = TREE_OPERAND (exp, 1);
537 if (fndecl
538 && DECL_BUILT_IN (fndecl)
539 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
540 && arglist != NULL_TREE
541 && TREE_CHAIN (arglist) != NULL_TREE)
543 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
544 if_true_label);
546 if (seq != NULL_RTX)
548 emit_insn (seq);
549 return;
553 /* Fall through and generate the normal code. */
555 default:
556 normal:
557 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
558 #if 0
559 /* This is not needed any more and causes poor code since it causes
560 comparisons and tests from non-SI objects to have different code
561 sequences. */
562 /* Copy to register to avoid generating bad insns by cse
563 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
564 if (!cse_not_expected && GET_CODE (temp) == MEM)
565 temp = copy_to_reg (temp);
566 #endif
567 do_pending_stack_adjust ();
568 /* Do any postincrements in the expression that was tested. */
569 emit_queue ();
571 if (GET_CODE (temp) == CONST_INT
572 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
573 || GET_CODE (temp) == LABEL_REF)
575 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
576 if (target)
577 emit_jump (target);
579 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
580 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
581 /* Note swapping the labels gives us not-equal. */
582 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
583 else if (GET_MODE (temp) != VOIDmode)
585 /* The RTL optimizers prefer comparisons against pseudos. */
586 if (GET_CODE (temp) == SUBREG)
587 temp = copy_to_reg (temp);
588 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
589 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
590 GET_MODE (temp), NULL_RTX,
591 if_false_label, if_true_label);
593 else
594 abort ();
597 if (drop_through_label)
599 /* If do_jump produces code that might be jumped around,
600 do any stack adjusts from that code, before the place
601 where control merges in. */
602 do_pending_stack_adjust ();
603 emit_label (drop_through_label);
607 /* Given a comparison expression EXP for values too wide to be compared
608 with one insn, test the comparison and jump to the appropriate label.
609 The code of EXP is ignored; we always test GT if SWAP is 0,
610 and LT if SWAP is 1. */
612 static void
613 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
614 rtx if_true_label)
616 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
617 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
618 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
619 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
621 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
624 /* Compare OP0 with OP1, word at a time, in mode MODE.
625 UNSIGNEDP says to do unsigned comparison.
626 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
628 void
629 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
630 rtx op1, rtx if_false_label, rtx if_true_label)
632 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
633 rtx drop_through_label = 0;
634 int i;
636 if (! if_true_label || ! if_false_label)
637 drop_through_label = gen_label_rtx ();
638 if (! if_true_label)
639 if_true_label = drop_through_label;
640 if (! if_false_label)
641 if_false_label = drop_through_label;
643 /* Compare a word at a time, high order first. */
644 for (i = 0; i < nwords; i++)
646 rtx op0_word, op1_word;
648 if (WORDS_BIG_ENDIAN)
650 op0_word = operand_subword_force (op0, i, mode);
651 op1_word = operand_subword_force (op1, i, mode);
653 else
655 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
656 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
659 /* All but high-order word must be compared as unsigned. */
660 do_compare_rtx_and_jump (op0_word, op1_word, GT,
661 (unsignedp || i > 0), word_mode, NULL_RTX,
662 NULL_RTX, if_true_label);
664 /* Consider lower words only if these are equal. */
665 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
666 NULL_RTX, NULL_RTX, if_false_label);
669 if (if_false_label)
670 emit_jump (if_false_label);
671 if (drop_through_label)
672 emit_label (drop_through_label);
675 /* Given an EQ_EXPR expression EXP for values too wide to be compared
676 with one insn, test the comparison and jump to the appropriate label. */
678 static void
679 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
681 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
682 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
683 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
684 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
685 int i;
686 rtx drop_through_label = 0;
688 if (! if_false_label)
689 drop_through_label = if_false_label = gen_label_rtx ();
691 for (i = 0; i < nwords; i++)
692 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
693 operand_subword_force (op1, i, mode),
694 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
695 word_mode, NULL_RTX, if_false_label, NULL_RTX);
697 if (if_true_label)
698 emit_jump (if_true_label);
699 if (drop_through_label)
700 emit_label (drop_through_label);
703 /* Jump according to whether OP0 is 0.
704 We assume that OP0 has an integer mode that is too wide
705 for the available compare insns. */
707 void
708 do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
710 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
711 rtx part;
712 int i;
713 rtx drop_through_label = 0;
715 /* The fastest way of doing this comparison on almost any machine is to
716 "or" all the words and compare the result. If all have to be loaded
717 from memory and this is a very wide item, it's possible this may
718 be slower, but that's highly unlikely. */
720 part = gen_reg_rtx (word_mode);
721 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
722 for (i = 1; i < nwords && part != 0; i++)
723 part = expand_binop (word_mode, ior_optab, part,
724 operand_subword_force (op0, i, GET_MODE (op0)),
725 part, 1, OPTAB_WIDEN);
727 if (part != 0)
729 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
730 NULL_RTX, if_false_label, if_true_label);
732 return;
735 /* If we couldn't do the "or" simply, do this with a series of compares. */
736 if (! if_false_label)
737 drop_through_label = if_false_label = gen_label_rtx ();
739 for (i = 0; i < nwords; i++)
740 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
741 const0_rtx, EQ, 1, word_mode, NULL_RTX,
742 if_false_label, NULL_RTX);
744 if (if_true_label)
745 emit_jump (if_true_label);
747 if (drop_through_label)
748 emit_label (drop_through_label);
751 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
752 (including code to compute the values to be compared)
753 and set (CC0) according to the result.
754 The decision as to signed or unsigned comparison must be made by the caller.
756 We force a stack adjustment unless there are currently
757 things pushed on the stack that aren't yet used.
759 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
760 compared. */
763 compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
764 enum machine_mode mode, rtx size)
766 enum rtx_code ucode;
767 rtx tem;
769 /* If one operand is constant, make it the second one. Only do this
770 if the other operand is not constant as well. */
772 if (swap_commutative_operands_p (op0, op1))
774 tem = op0;
775 op0 = op1;
776 op1 = tem;
777 code = swap_condition (code);
780 if (flag_force_mem)
782 op0 = force_not_mem (op0);
783 op1 = force_not_mem (op1);
786 do_pending_stack_adjust ();
788 ucode = unsignedp ? unsigned_condition (code) : code;
789 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
790 return tem;
792 #if 0
793 /* There's no need to do this now that combine.c can eliminate lots of
794 sign extensions. This can be less efficient in certain cases on other
795 machines. */
797 /* If this is a signed equality comparison, we can do it as an
798 unsigned comparison since zero-extension is cheaper than sign
799 extension and comparisons with zero are done as unsigned. This is
800 the case even on machines that can do fast sign extension, since
801 zero-extension is easier to combine with other operations than
802 sign-extension is. If we are comparing against a constant, we must
803 convert it to what it would look like unsigned. */
804 if ((code == EQ || code == NE) && ! unsignedp
805 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
807 if (GET_CODE (op1) == CONST_INT
808 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
809 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
810 unsignedp = 1;
812 #endif
814 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
816 #if HAVE_cc0
817 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
818 #else
819 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
820 #endif
823 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
824 The decision as to signed or unsigned comparison must be made by the caller.
826 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
827 compared. */
829 void
830 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
831 enum machine_mode mode, rtx size, rtx if_false_label,
832 rtx if_true_label)
834 enum rtx_code ucode;
835 rtx tem;
836 int dummy_true_label = 0;
838 /* Reverse the comparison if that is safe and we want to jump if it is
839 false. */
840 if (! if_true_label && ! FLOAT_MODE_P (mode))
842 if_true_label = if_false_label;
843 if_false_label = 0;
844 code = reverse_condition (code);
847 /* If one operand is constant, make it the second one. Only do this
848 if the other operand is not constant as well. */
850 if (swap_commutative_operands_p (op0, op1))
852 tem = op0;
853 op0 = op1;
854 op1 = tem;
855 code = swap_condition (code);
858 if (flag_force_mem)
860 op0 = force_not_mem (op0);
861 op1 = force_not_mem (op1);
864 do_pending_stack_adjust ();
866 ucode = unsignedp ? unsigned_condition (code) : code;
867 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
869 if (tem == const_true_rtx)
871 if (if_true_label)
872 emit_jump (if_true_label);
874 else
876 if (if_false_label)
877 emit_jump (if_false_label);
879 return;
882 #if 0
883 /* There's no need to do this now that combine.c can eliminate lots of
884 sign extensions. This can be less efficient in certain cases on other
885 machines. */
887 /* If this is a signed equality comparison, we can do it as an
888 unsigned comparison since zero-extension is cheaper than sign
889 extension and comparisons with zero are done as unsigned. This is
890 the case even on machines that can do fast sign extension, since
891 zero-extension is easier to combine with other operations than
892 sign-extension is. If we are comparing against a constant, we must
893 convert it to what it would look like unsigned. */
894 if ((code == EQ || code == NE) && ! unsignedp
895 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
897 if (GET_CODE (op1) == CONST_INT
898 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
899 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
900 unsignedp = 1;
902 #endif
904 if (! if_true_label)
906 dummy_true_label = 1;
907 if_true_label = gen_label_rtx ();
910 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
911 if_true_label);
913 if (if_false_label)
914 emit_jump (if_false_label);
915 if (dummy_true_label)
916 emit_label (if_true_label);
919 /* Generate code for a comparison expression EXP (including code to compute
920 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
921 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
922 generated code will drop through.
923 SIGNED_CODE should be the rtx operation for this comparison for
924 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
926 We force a stack adjustment unless there are currently
927 things pushed on the stack that aren't yet used. */
929 static void
930 do_compare_and_jump (tree exp, enum rtx_code signed_code,
931 enum rtx_code unsigned_code, rtx if_false_label,
932 rtx if_true_label)
934 rtx op0, op1;
935 tree type;
936 enum machine_mode mode;
937 int unsignedp;
938 enum rtx_code code;
940 /* Don't crash if the comparison was erroneous. */
941 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
942 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
943 return;
945 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
946 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
947 return;
949 type = TREE_TYPE (TREE_OPERAND (exp, 0));
950 mode = TYPE_MODE (type);
951 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
952 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
953 || (GET_MODE_BITSIZE (mode)
954 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
955 1)))))))
957 /* op0 might have been replaced by promoted constant, in which
958 case the type of second argument should be used. */
959 type = TREE_TYPE (TREE_OPERAND (exp, 1));
960 mode = TYPE_MODE (type);
962 unsignedp = TREE_UNSIGNED (type);
963 code = unsignedp ? unsigned_code : signed_code;
965 #ifdef HAVE_canonicalize_funcptr_for_compare
966 /* If function pointers need to be "canonicalized" before they can
967 be reliably compared, then canonicalize them. */
968 if (HAVE_canonicalize_funcptr_for_compare
969 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
970 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
971 == FUNCTION_TYPE))
973 rtx new_op0 = gen_reg_rtx (mode);
975 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
976 op0 = new_op0;
979 if (HAVE_canonicalize_funcptr_for_compare
980 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
981 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
982 == FUNCTION_TYPE))
984 rtx new_op1 = gen_reg_rtx (mode);
986 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
987 op1 = new_op1;
989 #endif
991 /* Do any postincrements in the expression that was tested. */
992 emit_queue ();
994 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
995 ((mode == BLKmode)
996 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
997 if_false_label, if_true_label);