/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

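/* Editorial example (not part of the original source): with 8-bit
   2's complement values, 100 + 100 wraps to -56.  The operands agree
   in sign but the sum does not, so ~(100 ^ 100) & (100 ^ -56) has the
   sign bit set and OVERFLOW_SUM_SIGN (100, 100, -56) is nonzero.
   By contrast OVERFLOW_SUM_SIGN (100, -100, 0) is zero, because the
   operands already differ in sign and no overflow is possible.  */
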
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}

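/* Editorial usage sketch: callers probe for exact division and fall
   back when the remainder is nonzero.  For example,

     div_if_zero_remainder (EXACT_DIV_EXPR,
                            build_int_cst (sizetype, 12),
                            build_int_cst (sizetype, 4))

   yields the sizetype constant 3, whereas dividing 12 by 5 the same
   way yields NULL_TREE.  */
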
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

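/* Editorial usage sketch (assumed caller, not from this file): a pass
   that folds speculatively brackets the work so any -Wstrict-overflow
   warning is only emitted when the folded result is actually kept.
   `expr' and `stmt' are placeholders.  */
#if 0
  fold_defer_overflow_warnings ();
  tree res = fold (expr);
  if (res != NULL_TREE && TREE_CODE (res) == INTEGER_CST)
    /* Result kept: emit any deferred warning at STMT's location.  */
    fold_undefer_overflow_warnings (true, stmt, 0);
  else
    /* Result discarded: drop the deferred warning too.  */
    fold_undefer_and_ignore_overflow_warnings ();
#endif
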
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

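/* Editorial example: for a 32-bit signed type the only constant that
   cannot be negated is INT_MIN, whose bit pattern is exactly 1 << 31;
   -INT_MIN does not fit in 32 bits.  Every other value, including
   INT_MAX, negates without overflow, so the test above reduces to
   comparing the (truncated) value against that lone sign-bit
   pattern.  */
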
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

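/* Editorial example: for signed int where overflow is undefined (no
   -fwrapv), negate_expr_p says yes to a - b (negatable as b - a) and
   to x * -4 (negate the constant factor instead), but no to the bare
   INTEGER_CST INT_MIN, since -INT_MIN would overflow.  */
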
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

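/* Editorial example: given the REAL_TYPE expression -(a - b) with
   -fno-signed-zeros in effect, the MINUS_EXPR arm above rewrites it
   to b - a.  With signed zeros honored it is left alone, since
   -(0.0 - 0.0) is -0.0 while 0.0 - 0.0 is +0.0.  */
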
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

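/* Editorial worked example: splitting IN = x - 5 with CODE ==
   PLUS_EXPR and NEGATE_P == 0 takes the MINUS_EXPR arm above: the
   literal 5 lands in *MINUS_LITP (it was subtracted), *CONP and *LITP
   stay null, and x is returned as the variable part.  Re-associating
   the pieces with PLUS_EXPR reconstructs x - 5.  */
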
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
              goto associate_trees_exit;
            }
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                            fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
              goto associate_trees_exit;
            }
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

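/* Editorial example: with NOTRUNC == 0, int_const_binop (PLUS_EXPR,
   a, b, 0) on INTEGER_CSTs 7 and 9 of a 32-bit int type yields the
   INTEGER_CST 16.  With INT_MAX and 1 it still yields a constant, but
   add_double reports the wrap via OVERFLOW_SUM_SIGN and
   force_fit_type_double sets TREE_OVERFLOW on the result.  */
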
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2, notrunc),
                               const_binop (MULT_EXPR, i2, i2, notrunc),
                               notrunc);
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2, notrunc),
                               const_binop (MULT_EXPR, i1, i2, notrunc),
                               notrunc);
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2, notrunc),
                               const_binop (MULT_EXPR, r1, i2, notrunc),
                               notrunc);

              real = const_binop (code, t1, magsquared, notrunc);
              imag = const_binop (code, t2, magsquared, notrunc);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2, notrunc);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio,
                                                       notrunc),
                                          notrunc);
                  real = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, i1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2, notrunc);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio,
                                                       notrunc),
                                          notrunc);

                  real = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, r1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2, notrunc);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }

      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}

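/* Editorial sketch (not part of GCC): the wide-range method above is
   Smith's algorithm.  On plain C doubles it looks like the function
   below; the ratio is always formed from the smaller-magnitude
   component of the divisor, which avoids the premature overflow the
   naive (br*br + bi*bi) denominator suffers for large inputs.
   complex_div_wide_sketch is a hypothetical name for illustration.  */
#if 0
#include <math.h>

static void
complex_div_wide_sketch (double ar, double ai, double br, double bi,
                         double *tr, double *ti)
{
  if (fabs (br) < fabs (bi))
    {
      /* Mirrors the TRUE branch above.  */
      double ratio = br / bi;
      double div = (br * ratio) + bi;
      *tr = ((ar * ratio) + ai) / div;
      *ti = ((ai * ratio) - ar) / div;
    }
  else
    {
      /* Mirrors the FALSE branch above.  */
      double ratio = bi / br;
      double div = (bi * ratio) + br;
      *tr = ((ai * ratio) + ar) / div;
      *ti = (ai - (ar * ratio)) / div;
    }
}
#endif
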
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}

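/* Editorial example: size_diffop_loc on the sizetype constants 4 and
   8 cannot simply subtract in the unsigned type, since the result -4
   must be signed; it instead computes 0 - (8 - 4) in ssizetype,
   giving the ssizetype constant -4.  */
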
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val.low, val.high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}

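/* Editorial example: under these rules, folding (int) 1.0e30 clamps
   to INT_MAX and marks the result with TREE_OVERFLOW, and (int) of a
   NaN constant folds to 0, likewise flagged.  C treats such overflow
   as unspecified, so saturating is a permissible choice.  */
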
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If any fractional bits are nonzero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

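/* Editorial example: fold_convert_const (FIX_TRUNC_EXPR, an integer
   type, REAL_CST 2.5) dispatches to fold_convert_const_int_from_real
   and yields 2.  An unhandled pairing, say a VECTOR_CST input, simply
   returns NULL_TREE and the caller keeps the original expression.  */
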
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
        goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);
2042 default:
2043 gcc_unreachable ();
2045 fold_convert_exit:
2046 protected_set_expr_location (tem, loc);
2047 return tem;
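[Editorial sketch] The scalar<->complex cases above are easiest to read as value-level rules: a scalar widens to (x, 0), and a complex narrows by taking its real part first. A minimal standalone analogue with plain doubles (illustrative only; the real code also threads types, locations, and save_expr through):

#include <stdio.h>

struct cpx { double re, im; };   /* stand-in for a COMPLEX_EXPR pair */

/* Scalar -> complex: convert, then pair with zero, as the COMPLEX_TYPE
   case does with fold_build2_loc (loc, COMPLEX_EXPR, ...).  */
static struct cpx
complex_from_scalar (double x)
{
  struct cpx c = { x, 0.0 };
  return c;
}

/* Complex -> scalar: take the real part, then convert, matching the
   REALPART_EXPR paths in the scalar cases.  */
static double
scalar_from_complex (struct cpx c)
{
  return c.re;
}

int
main (void)
{
  struct cpx c = complex_from_scalar (3.0);
  printf ("(%g, %g) -> %g\n", c.re, c.im, scalar_from_complex (c));
  return 0;
}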
2050 /* Return false if expr can be assumed not to be an lvalue, true
2051 otherwise. */
2053 static bool
2054 maybe_lvalue_p (const_tree x)
2056 /* We only need to wrap lvalue tree codes. */
2057 switch (TREE_CODE (x))
2059 case VAR_DECL:
2060 case PARM_DECL:
2061 case RESULT_DECL:
2062 case LABEL_DECL:
2063 case FUNCTION_DECL:
2064 case SSA_NAME:
2066 case COMPONENT_REF:
2067 case INDIRECT_REF:
2068 case ALIGN_INDIRECT_REF:
2069 case MISALIGNED_INDIRECT_REF:
2070 case ARRAY_REF:
2071 case ARRAY_RANGE_REF:
2072 case BIT_FIELD_REF:
2073 case OBJ_TYPE_REF:
2075 case REALPART_EXPR:
2076 case IMAGPART_EXPR:
2077 case PREINCREMENT_EXPR:
2078 case PREDECREMENT_EXPR:
2079 case SAVE_EXPR:
2080 case TRY_CATCH_EXPR:
2081 case WITH_CLEANUP_EXPR:
2082 case COMPOUND_EXPR:
2083 case MODIFY_EXPR:
2084 case TARGET_EXPR:
2085 case COND_EXPR:
2086 case BIND_EXPR:
2087 break;
2089 default:
2090 /* Assume the worst for front-end tree codes. */
2091 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2092 break;
2093 return false;
2096 return true;
2099 /* Return an expr equal to X but certainly not valid as an lvalue. */
2101 tree
2102 non_lvalue_loc (location_t loc, tree x)
2104 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2105 us. */
2106 if (in_gimple_form)
2107 return x;
2109 if (! maybe_lvalue_p (x))
2110 return x;
2111 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2112 SET_EXPR_LOCATION (x, loc);
2113 return x;
2116 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2117 Zero means allow extended lvalues. */
2119 int pedantic_lvalues;
2121 /* When pedantic, return an expr equal to X but certainly not valid as a
2122 pedantic lvalue. Otherwise, return X. */
2124 static tree
2125 pedantic_non_lvalue_loc (location_t loc, tree x)
2127 if (pedantic_lvalues)
2128 return non_lvalue_loc (loc, x);
2129 protected_set_expr_location (x, loc);
2130 return x;
2133 /* Given a tree comparison code, return the code that is the logical inverse
2134 of the given code. It is not safe to do this for floating-point
2135 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2136 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2138 enum tree_code
2139 invert_tree_comparison (enum tree_code code, bool honor_nans)
2141 if (honor_nans && flag_trapping_math)
2142 return ERROR_MARK;
2144 switch (code)
2146 case EQ_EXPR:
2147 return NE_EXPR;
2148 case NE_EXPR:
2149 return EQ_EXPR;
2150 case GT_EXPR:
2151 return honor_nans ? UNLE_EXPR : LE_EXPR;
2152 case GE_EXPR:
2153 return honor_nans ? UNLT_EXPR : LT_EXPR;
2154 case LT_EXPR:
2155 return honor_nans ? UNGE_EXPR : GE_EXPR;
2156 case LE_EXPR:
2157 return honor_nans ? UNGT_EXPR : GT_EXPR;
2158 case LTGT_EXPR:
2159 return UNEQ_EXPR;
2160 case UNEQ_EXPR:
2161 return LTGT_EXPR;
2162 case UNGT_EXPR:
2163 return LE_EXPR;
2164 case UNGE_EXPR:
2165 return LT_EXPR;
2166 case UNLT_EXPR:
2167 return GE_EXPR;
2168 case UNLE_EXPR:
2169 return GT_EXPR;
2170 case ORDERED_EXPR:
2171 return UNORDERED_EXPR;
2172 case UNORDERED_EXPR:
2173 return ORDERED_EXPR;
2174 default:
2175 gcc_unreachable ();
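[Editorial sketch] The honor_nans distinction above is observable in plain C: when an operand is a NaN, the ordinary inverse comparison is not the logical negation. A quick standalone check:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = NAN, y = 1.0;

  /* Both ordered comparisons are false on a NaN operand, so the naive
     inversion LT -> GE would change the result; the correct inverse of
     LT is UNGE, which !(x < y) computes.  */
  printf ("x < y    : %d\n", x < y);      /* 0 */
  printf ("x >= y   : %d\n", x >= y);     /* 0 -- not the negation */
  printf ("!(x < y) : %d\n", !(x < y));   /* 1 -- UNGE */
  return 0;
}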
2179 /* Similar, but return the comparison that results if the operands are
2180 swapped. This is safe for floating-point. */
2182 enum tree_code
2183 swap_tree_comparison (enum tree_code code)
2185 switch (code)
2187 case EQ_EXPR:
2188 case NE_EXPR:
2189 case ORDERED_EXPR:
2190 case UNORDERED_EXPR:
2191 case LTGT_EXPR:
2192 case UNEQ_EXPR:
2193 return code;
2194 case GT_EXPR:
2195 return LT_EXPR;
2196 case GE_EXPR:
2197 return LE_EXPR;
2198 case LT_EXPR:
2199 return GT_EXPR;
2200 case LE_EXPR:
2201 return GE_EXPR;
2202 case UNGT_EXPR:
2203 return UNLT_EXPR;
2204 case UNGE_EXPR:
2205 return UNLE_EXPR;
2206 case UNLT_EXPR:
2207 return UNGT_EXPR;
2208 case UNLE_EXPR:
2209 return UNGE_EXPR;
2210 default:
2211 gcc_unreachable ();
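[Editorial sketch] Unlike inversion, swapping is unconditionally safe, NaNs included: x < y and y > x agree for every pair of doubles, which is why no honor_nans parameter appears here. A spot check:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double pairs[][2] = { { 1.0, 2.0 }, { 2.0, 1.0 }, { NAN, 1.0 } };
  for (int i = 0; i < 3; i++)
    {
      double x = pairs[i][0], y = pairs[i][1];
      printf ("x<y=%d  y>x=%d\n", x < y, y > x);   /* always equal */
    }
  return 0;
}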
2216 /* Convert a comparison tree code from an enum tree_code representation
2217 into a compcode bit-based encoding. This function is the inverse of
2218 compcode_to_comparison. */
2220 static enum comparison_code
2221 comparison_to_compcode (enum tree_code code)
2223 switch (code)
2225 case LT_EXPR:
2226 return COMPCODE_LT;
2227 case EQ_EXPR:
2228 return COMPCODE_EQ;
2229 case LE_EXPR:
2230 return COMPCODE_LE;
2231 case GT_EXPR:
2232 return COMPCODE_GT;
2233 case NE_EXPR:
2234 return COMPCODE_NE;
2235 case GE_EXPR:
2236 return COMPCODE_GE;
2237 case ORDERED_EXPR:
2238 return COMPCODE_ORD;
2239 case UNORDERED_EXPR:
2240 return COMPCODE_UNORD;
2241 case UNLT_EXPR:
2242 return COMPCODE_UNLT;
2243 case UNEQ_EXPR:
2244 return COMPCODE_UNEQ;
2245 case UNLE_EXPR:
2246 return COMPCODE_UNLE;
2247 case UNGT_EXPR:
2248 return COMPCODE_UNGT;
2249 case LTGT_EXPR:
2250 return COMPCODE_LTGT;
2251 case UNGE_EXPR:
2252 return COMPCODE_UNGE;
2253 default:
2254 gcc_unreachable ();
2258 /* Convert a compcode bit-based encoding of a comparison operator back
2259 to GCC's enum tree_code representation. This function is the
2260 inverse of comparison_to_compcode. */
2262 static enum tree_code
2263 compcode_to_comparison (enum comparison_code code)
2265 switch (code)
2267 case COMPCODE_LT:
2268 return LT_EXPR;
2269 case COMPCODE_EQ:
2270 return EQ_EXPR;
2271 case COMPCODE_LE:
2272 return LE_EXPR;
2273 case COMPCODE_GT:
2274 return GT_EXPR;
2275 case COMPCODE_NE:
2276 return NE_EXPR;
2277 case COMPCODE_GE:
2278 return GE_EXPR;
2279 case COMPCODE_ORD:
2280 return ORDERED_EXPR;
2281 case COMPCODE_UNORD:
2282 return UNORDERED_EXPR;
2283 case COMPCODE_UNLT:
2284 return UNLT_EXPR;
2285 case COMPCODE_UNEQ:
2286 return UNEQ_EXPR;
2287 case COMPCODE_UNLE:
2288 return UNLE_EXPR;
2289 case COMPCODE_UNGT:
2290 return UNGT_EXPR;
2291 case COMPCODE_LTGT:
2292 return LTGT_EXPR;
2293 case COMPCODE_UNGE:
2294 return UNGE_EXPR;
2295 default:
2296 gcc_unreachable ();
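[Editorial sketch] The point of the bit encoding is that logical AND/OR of two predicates over the same operands becomes bitwise AND/OR of their codes, which combine_comparisons below exploits directly. A standalone check using the same numeric values as the comparison_code enum at the top of the file:

#include <stdio.h>

enum { LT = 1, EQ = 2, LE = 3, GT = 4, LTGT = 5, GE = 6 };

int
main (void)
{
  printf ("LT|EQ = %d (LE   = %d)\n", LT | EQ, LE);    /* x<y || x==y  =>  x<=y */
  printf ("LE&GE = %d (EQ   = %d)\n", LE & GE, EQ);    /* x<=y && x>=y =>  x==y */
  printf ("LT|GT = %d (LTGT = %d)\n", LT | GT, LTGT);  /* x<y || x>y   =>  x<>y */
  return 0;
}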
2300 /* Return a tree for the comparison which is the combination of
2301 doing the AND or OR (depending on CODE) of the two operations LCODE
2302 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2303 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2304 if this makes the transformation invalid. */
2306 tree
2307 combine_comparisons (location_t loc,
2308 enum tree_code code, enum tree_code lcode,
2309 enum tree_code rcode, tree truth_type,
2310 tree ll_arg, tree lr_arg)
2312 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2313 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2314 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2315 int compcode;
2317 switch (code)
2319 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2320 compcode = lcompcode & rcompcode;
2321 break;
2323 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2324 compcode = lcompcode | rcompcode;
2325 break;
2327 default:
2328 return NULL_TREE;
2331 if (!honor_nans)
2333 /* Eliminate unordered comparisons, as well as LTGT and ORD
2334 which are not used unless the mode has NaNs. */
2335 compcode &= ~COMPCODE_UNORD;
2336 if (compcode == COMPCODE_LTGT)
2337 compcode = COMPCODE_NE;
2338 else if (compcode == COMPCODE_ORD)
2339 compcode = COMPCODE_TRUE;
2341 else if (flag_trapping_math)
2343 /* Check that the original operation and the optimized ones will trap
2344 under the same condition. */
2345 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2346 && (lcompcode != COMPCODE_EQ)
2347 && (lcompcode != COMPCODE_ORD);
2348 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2349 && (rcompcode != COMPCODE_EQ)
2350 && (rcompcode != COMPCODE_ORD);
2351 bool trap = (compcode & COMPCODE_UNORD) == 0
2352 && (compcode != COMPCODE_EQ)
2353 && (compcode != COMPCODE_ORD);
2355 /* In a short-circuited boolean expression the LHS might be
2356 such that the RHS, if evaluated, will never trap. For
2357 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2358 if neither x nor y is NaN. (This is a mixed blessing: for
2359 example, the expression above will never trap, hence
2360 optimizing it to x < y would be invalid). */
2361 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2362 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2363 rtrap = false;
2365 /* If the comparison was short-circuited, and only the RHS
2366 trapped, we may now generate a spurious trap. */
2367 if (rtrap && !ltrap
2368 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2369 return NULL_TREE;
2371 /* If we changed the conditions that cause a trap, we lose. */
2372 if ((ltrap || rtrap) != trap)
2373 return NULL_TREE;
2376 if (compcode == COMPCODE_TRUE)
2377 return constant_boolean_node (true, truth_type);
2378 else if (compcode == COMPCODE_FALSE)
2379 return constant_boolean_node (false, truth_type);
2380 else
2382 enum tree_code tcode;
2384 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2385 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2389 /* Return nonzero if two operands (typically of the same tree node)
2390 are necessarily equal. If either argument has side-effects this
2391 function returns zero. FLAGS modifies behavior as follows:
2393 If OEP_ONLY_CONST is set, only return nonzero for constants.
2394 This function tests whether the operands are indistinguishable;
2395 it does not test whether they are equal using C's == operation.
2396 The distinction is important for IEEE floating point, because
2397 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2398 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2400 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2401 even though it may hold multiple values during a function.
2402 This is because a GCC tree node guarantees that nothing else is
2403 executed between the evaluation of its "operands" (which may often
2404 be evaluated in arbitrary order). Hence if the operands themselves
2405 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2406 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2407 unset means assuming isochronic (or instantaneous) tree equivalence.
2408 Unless comparing arbitrary expression trees, such as from different
2409 statements, this flag can usually be left unset.
2411 If OEP_PURE_SAME is set, then pure functions with identical arguments
2412 are considered the same. It is used when the caller has other ways
2413 to ensure that global memory is unchanged in between. */
2415 int
2416 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2418 /* If either is ERROR_MARK, they aren't equal. */
2419 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2420 || TREE_TYPE (arg0) == error_mark_node
2421 || TREE_TYPE (arg1) == error_mark_node)
2422 return 0;
2424 /* Similarly, if either does not have a type (like a released SSA name),
2425 they aren't equal. */
2426 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2427 return 0;
2429 /* Check equality of integer constants before bailing out due to
2430 precision differences. */
2431 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2432 return tree_int_cst_equal (arg0, arg1);
2434 /* If both types don't have the same signedness, then we can't consider
2435 them equal. We must check this before the STRIP_NOPS calls
2436 because they may change the signedness of the arguments. As pointers
2437 strictly don't have a signedness, require either two pointers or
2438 two non-pointers as well. */
2439 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2440 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2441 return 0;
2443 /* We cannot consider pointers to different address spaces equal. */
2444 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2445 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2446 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2447 return 0;
2449 /* If both types don't have the same precision, then it is not safe
2450 to strip NOPs. */
2451 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2452 return 0;
2454 STRIP_NOPS (arg0);
2455 STRIP_NOPS (arg1);
2457 /* In case both args are comparisons but with different comparison
2458 code, try to swap the comparison operands of one arg to produce
2459 a match and compare that variant. */
2460 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2461 && COMPARISON_CLASS_P (arg0)
2462 && COMPARISON_CLASS_P (arg1))
2464 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2466 if (TREE_CODE (arg0) == swap_code)
2467 return operand_equal_p (TREE_OPERAND (arg0, 0),
2468 TREE_OPERAND (arg1, 1), flags)
2469 && operand_equal_p (TREE_OPERAND (arg0, 1),
2470 TREE_OPERAND (arg1, 0), flags);
2473 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2474 /* This is needed for conversions and for COMPONENT_REF.
2475 Might as well play it safe and always test this. */
2476 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2477 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2478 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2479 return 0;
2481 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2482 We don't care about side effects in that case because the SAVE_EXPR
2483 takes care of that for us. In all other cases, two expressions are
2484 equal if they have no side effects. If we have two identical
2485 expressions with side effects that should be treated the same due
2486 to the only side effects being identical SAVE_EXPR's, that will
2487 be detected in the recursive calls below. */
2488 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2489 && (TREE_CODE (arg0) == SAVE_EXPR
2490 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2491 return 1;
2493 /* Next handle constant cases, those for which we can return 1 even
2494 if ONLY_CONST is set. */
2495 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2496 switch (TREE_CODE (arg0))
2498 case INTEGER_CST:
2499 return tree_int_cst_equal (arg0, arg1);
2501 case FIXED_CST:
2502 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2503 TREE_FIXED_CST (arg1));
2505 case REAL_CST:
2506 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2507 TREE_REAL_CST (arg1)))
2508 return 1;
2511 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2513 /* If we do not distinguish between signed and unsigned zero,
2514 consider them equal. */
2515 if (real_zerop (arg0) && real_zerop (arg1))
2516 return 1;
2518 return 0;
2520 case VECTOR_CST:
2522 tree v1, v2;
2524 v1 = TREE_VECTOR_CST_ELTS (arg0);
2525 v2 = TREE_VECTOR_CST_ELTS (arg1);
2526 while (v1 && v2)
2528 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2529 flags))
2530 return 0;
2531 v1 = TREE_CHAIN (v1);
2532 v2 = TREE_CHAIN (v2);
2535 return v1 == v2;
2538 case COMPLEX_CST:
2539 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2540 flags)
2541 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2542 flags));
2544 case STRING_CST:
2545 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2546 && ! memcmp (TREE_STRING_POINTER (arg0),
2547 TREE_STRING_POINTER (arg1),
2548 TREE_STRING_LENGTH (arg0)));
2550 case ADDR_EXPR:
2551 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2552 0);
2553 default:
2554 break;
2557 if (flags & OEP_ONLY_CONST)
2558 return 0;
2560 /* Define macros to test an operand from arg0 and arg1 for equality and a
2561 variant that allows null and views null as being different from any
2562 non-null value. In the latter case, if either is null, they both
2563 must be; otherwise, do the normal comparison. */
2564 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2565 TREE_OPERAND (arg1, N), flags)
2567 #define OP_SAME_WITH_NULL(N) \
2568 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2569 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2571 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2573 case tcc_unary:
2574 /* Two conversions are equal only if signedness and modes match. */
2575 switch (TREE_CODE (arg0))
2577 CASE_CONVERT:
2578 case FIX_TRUNC_EXPR:
2579 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2580 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2581 return 0;
2582 break;
2583 default:
2584 break;
2587 return OP_SAME (0);
2590 case tcc_comparison:
2591 case tcc_binary:
2592 if (OP_SAME (0) && OP_SAME (1))
2593 return 1;
2595 /* For commutative ops, allow the other order. */
2596 return (commutative_tree_code (TREE_CODE (arg0))
2597 && operand_equal_p (TREE_OPERAND (arg0, 0),
2598 TREE_OPERAND (arg1, 1), flags)
2599 && operand_equal_p (TREE_OPERAND (arg0, 1),
2600 TREE_OPERAND (arg1, 0), flags));
2602 case tcc_reference:
2603 /* If either of the pointer (or reference) expressions we are
2604 dereferencing contain a side effect, these cannot be equal. */
2605 if (TREE_SIDE_EFFECTS (arg0)
2606 || TREE_SIDE_EFFECTS (arg1))
2607 return 0;
2609 switch (TREE_CODE (arg0))
2611 case INDIRECT_REF:
2612 case ALIGN_INDIRECT_REF:
2613 case MISALIGNED_INDIRECT_REF:
2614 case REALPART_EXPR:
2615 case IMAGPART_EXPR:
2616 return OP_SAME (0);
2618 case ARRAY_REF:
2619 case ARRAY_RANGE_REF:
2620 /* Operands 2 and 3 may be null.
2621 Compare the array index by value first if it is constant, as we
2622 may have different types but the same value here. */
2623 return (OP_SAME (0)
2624 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2625 TREE_OPERAND (arg1, 1))
2626 || OP_SAME (1))
2627 && OP_SAME_WITH_NULL (2)
2628 && OP_SAME_WITH_NULL (3));
2630 case COMPONENT_REF:
2631 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2632 may be NULL when we're called to compare MEM_EXPRs. */
2633 return OP_SAME_WITH_NULL (0)
2634 && OP_SAME (1)
2635 && OP_SAME_WITH_NULL (2);
2637 case BIT_FIELD_REF:
2638 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2640 default:
2641 return 0;
2644 case tcc_expression:
2645 switch (TREE_CODE (arg0))
2647 case ADDR_EXPR:
2648 case TRUTH_NOT_EXPR:
2649 return OP_SAME (0);
2651 case TRUTH_ANDIF_EXPR:
2652 case TRUTH_ORIF_EXPR:
2653 return OP_SAME (0) && OP_SAME (1);
2655 case TRUTH_AND_EXPR:
2656 case TRUTH_OR_EXPR:
2657 case TRUTH_XOR_EXPR:
2658 if (OP_SAME (0) && OP_SAME (1))
2659 return 1;
2661 /* Otherwise take into account this is a commutative operation. */
2662 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2663 TREE_OPERAND (arg1, 1), flags)
2664 && operand_equal_p (TREE_OPERAND (arg0, 1),
2665 TREE_OPERAND (arg1, 0), flags));
2667 case COND_EXPR:
2668 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2670 default:
2671 return 0;
2674 case tcc_vl_exp:
2675 switch (TREE_CODE (arg0))
2677 case CALL_EXPR:
2678 /* If the CALL_EXPRs call different functions, then they
2679 clearly cannot be equal. */
2680 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2681 flags))
2682 return 0;
2685 unsigned int cef = call_expr_flags (arg0);
2686 if (flags & OEP_PURE_SAME)
2687 cef &= ECF_CONST | ECF_PURE;
2688 else
2689 cef &= ECF_CONST;
2690 if (!cef)
2691 return 0;
2694 /* Now see if all the arguments are the same. */
2696 const_call_expr_arg_iterator iter0, iter1;
2697 const_tree a0, a1;
2698 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2699 a1 = first_const_call_expr_arg (arg1, &iter1);
2700 a0 && a1;
2701 a0 = next_const_call_expr_arg (&iter0),
2702 a1 = next_const_call_expr_arg (&iter1))
2703 if (! operand_equal_p (a0, a1, flags))
2704 return 0;
2706 /* If we get here and both argument lists are exhausted
2707 then the CALL_EXPRs are equal. */
2708 return ! (a0 || a1);
2710 default:
2711 return 0;
2714 case tcc_declaration:
2715 /* Consider __builtin_sqrt equal to sqrt. */
2716 return (TREE_CODE (arg0) == FUNCTION_DECL
2717 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2718 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2719 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2721 default:
2722 return 0;
2725 #undef OP_SAME
2726 #undef OP_SAME_WITH_NULL
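[Editorial sketch] The REAL_CST case above is where the doc comment's IEEE caveat bites: -0.0 and 0.0 satisfy == yet are distinguishable values, so REAL_VALUES_IDENTICAL is used instead of numeric equality unless HONOR_SIGNED_ZEROS says the distinction is irrelevant. The distinguishability is easy to exhibit:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;

  printf ("pz == nz     : %d\n", pz == nz);                  /* 1 */
  printf ("signbit (nz) : %d\n", signbit (nz) != 0);         /* 1 */
  printf ("1/pz vs 1/nz : %g vs %g\n", 1.0 / pz, 1.0 / nz);  /* inf vs -inf */
  return 0;
}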
2729 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2730 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2732 When in doubt, return 0. */
2734 static int
2735 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2737 int unsignedp1, unsignedpo;
2738 tree primarg0, primarg1, primother;
2739 unsigned int correct_width;
2741 if (operand_equal_p (arg0, arg1, 0))
2742 return 1;
2744 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2745 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2746 return 0;
2748 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2749 and see if the inner values are the same. This removes any
2750 signedness comparison, which doesn't matter here. */
2751 primarg0 = arg0, primarg1 = arg1;
2752 STRIP_NOPS (primarg0);
2753 STRIP_NOPS (primarg1);
2754 if (operand_equal_p (primarg0, primarg1, 0))
2755 return 1;
2757 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2758 actual comparison operand, ARG0.
2760 First throw away any conversions to wider types
2761 already present in the operands. */
2763 primarg1 = get_narrower (arg1, &unsignedp1);
2764 primother = get_narrower (other, &unsignedpo);
2766 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2767 if (unsignedp1 == unsignedpo
2768 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2769 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2771 tree type = TREE_TYPE (arg0);
2773 /* Make sure shorter operand is extended the right way
2774 to match the longer operand. */
2775 primarg1 = fold_convert (signed_or_unsigned_type_for
2776 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2778 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2779 return 1;
2782 return 0;
2785 /* See if ARG is an expression that is either a comparison or is performing
2786 arithmetic on comparisons. The comparisons must only be comparing
2787 two different values, which will be stored in *CVAL1 and *CVAL2; if
2788 they are nonzero it means that some operands have already been found.
2789 No variables may be used anywhere else in the expression except in the
2790 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2791 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2793 If this is true, return 1. Otherwise, return zero. */
2795 static int
2796 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2798 enum tree_code code = TREE_CODE (arg);
2799 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2801 /* We can handle some of the tcc_expression cases here. */
2802 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2803 tclass = tcc_unary;
2804 else if (tclass == tcc_expression
2805 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2806 || code == COMPOUND_EXPR))
2807 tclass = tcc_binary;
2809 else if (tclass == tcc_expression && code == SAVE_EXPR
2810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2812 /* If we've already found a CVAL1 or CVAL2, this expression is
2813 too complex to handle. */
2814 if (*cval1 || *cval2)
2815 return 0;
2817 tclass = tcc_unary;
2818 *save_p = 1;
2821 switch (tclass)
2823 case tcc_unary:
2824 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2826 case tcc_binary:
2827 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2828 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2829 cval1, cval2, save_p));
2831 case tcc_constant:
2832 return 1;
2834 case tcc_expression:
2835 if (code == COND_EXPR)
2836 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2837 cval1, cval2, save_p)
2838 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2841 cval1, cval2, save_p));
2842 return 0;
2844 case tcc_comparison:
2845 /* First see if we can handle the first operand, then the second. For
2846 the second operand, we know *CVAL1 can't be zero. It must be that
2847 one side of the comparison is each of the values; test for the
2848 case where this isn't true by failing if the two operands
2849 are the same. */
2851 if (operand_equal_p (TREE_OPERAND (arg, 0),
2852 TREE_OPERAND (arg, 1), 0))
2853 return 0;
2855 if (*cval1 == 0)
2856 *cval1 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2859 else if (*cval2 == 0)
2860 *cval2 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2863 else
2864 return 0;
2866 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2868 else if (*cval2 == 0)
2869 *cval2 = TREE_OPERAND (arg, 1);
2870 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2872 else
2873 return 0;
2875 return 1;
2877 default:
2878 return 0;
2882 /* ARG is a tree that is known to contain just arithmetic operations and
2883 comparisons. Evaluate the operations in the tree substituting NEW0 for
2884 any occurrence of OLD0 as an operand of a comparison and likewise for
2885 NEW1 and OLD1. */
2887 static tree
2888 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2889 tree old1, tree new1)
2891 tree type = TREE_TYPE (arg);
2892 enum tree_code code = TREE_CODE (arg);
2893 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2895 /* We can handle some of the tcc_expression cases here. */
2896 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2897 tclass = tcc_unary;
2898 else if (tclass == tcc_expression
2899 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2900 tclass = tcc_binary;
2902 switch (tclass)
2904 case tcc_unary:
2905 return fold_build1_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1));
2909 case tcc_binary:
2910 return fold_build2_loc (loc, code, type,
2911 eval_subst (loc, TREE_OPERAND (arg, 0),
2912 old0, new0, old1, new1),
2913 eval_subst (loc, TREE_OPERAND (arg, 1),
2914 old0, new0, old1, new1));
2916 case tcc_expression:
2917 switch (code)
2919 case SAVE_EXPR:
2920 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2921 old1, new1);
2923 case COMPOUND_EXPR:
2924 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2925 old1, new1);
2927 case COND_EXPR:
2928 return fold_build3_loc (loc, code, type,
2929 eval_subst (loc, TREE_OPERAND (arg, 0),
2930 old0, new0, old1, new1),
2931 eval_subst (loc, TREE_OPERAND (arg, 1),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 2),
2934 old0, new0, old1, new1));
2935 default:
2936 break;
2938 /* Fall through - ??? */
2940 case tcc_comparison:
2942 tree arg0 = TREE_OPERAND (arg, 0);
2943 tree arg1 = TREE_OPERAND (arg, 1);
2945 /* We need to check both for exact equality and tree equality. The
2946 former will be true if the operand has a side-effect. In that
2947 case, we know the operand occurred exactly once. */
2949 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2950 arg0 = new0;
2951 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2952 arg0 = new1;
2954 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2955 arg1 = new0;
2956 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2957 arg1 = new1;
2959 return fold_build2_loc (loc, code, type, arg0, arg1);
2962 default:
2963 return arg;
2967 /* Return a tree for the case when the result of an expression is RESULT
2968 converted to TYPE and OMITTED was previously an operand of the expression
2969 but is now not needed (e.g., we folded OMITTED * 0).
2971 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2972 the conversion of RESULT to TYPE. */
2974 tree
2975 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2977 tree t = fold_convert_loc (loc, type, result);
2979 /* If the resulting operand is an empty statement, just return the omitted
2980 statement cast to void. */
2981 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2983 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2984 goto omit_one_operand_exit;
2987 if (TREE_SIDE_EFFECTS (omitted))
2989 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2990 goto omit_one_operand_exit;
2993 return non_lvalue_loc (loc, t);
2995 omit_one_operand_exit:
2996 protected_set_expr_location (t, loc);
2997 return t;
3000 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3002 static tree
3003 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3004 tree omitted)
3006 tree t = fold_convert_loc (loc, type, result);
3008 /* If the resulting operand is an empty statement, just return the omitted
3009 statement cast to void. */
3010 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3012 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3013 goto pedantic_omit_one_operand_exit;
3016 if (TREE_SIDE_EFFECTS (omitted))
3018 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3019 goto pedantic_omit_one_operand_exit;
3022 return pedantic_non_lvalue_loc (loc, t);
3024 pedantic_omit_one_operand_exit:
3025 protected_set_expr_location (t, loc);
3026 return t;
3029 /* Return a tree for the case when the result of an expression is RESULT
3030 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3031 of the expression but are now not needed.
3033 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3034 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3035 evaluated before OMITTED2. Otherwise, if neither has side effects,
3036 just do the conversion of RESULT to TYPE. */
3038 tree
3039 omit_two_operands_loc (location_t loc, tree type, tree result,
3040 tree omitted1, tree omitted2)
3042 tree t = fold_convert_loc (loc, type, result);
3044 if (TREE_SIDE_EFFECTS (omitted2))
3046 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3047 SET_EXPR_LOCATION (t, loc);
3049 if (TREE_SIDE_EFFECTS (omitted1))
3051 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3052 SET_EXPR_LOCATION (t, loc);
3055 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
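[Editorial sketch] The COMPOUND_EXPRs built here behave like C's comma operator: the omitted operands still run for their side effects, in the documented order, before the folded result is produced. A sketch of what the fold of f () * 0 must preserve:

#include <stdio.h>

static int calls;

static int
f (void)
{
  calls++;
  return 42;
}

int
main (void)
{
  /* The folded form of f () * 0: evaluate the omitted operand, then
     yield the constant -- the shape omit_one_operand builds.  */
  int r = (f (), 0);
  printf ("r = %d, calls = %d\n", r, calls);   /* r = 0, calls = 1 */
  return 0;
}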
3059 /* Return a simplified tree node for the truth-negation of ARG. This
3060 never alters ARG itself. We assume that ARG is an operation that
3061 returns a truth value (0 or 1).
3063 FIXME: one would think we would fold the result, but it causes
3064 problems with the dominator optimizer. */
3066 tree
3067 fold_truth_not_expr (location_t loc, tree arg)
3069 tree t, type = TREE_TYPE (arg);
3070 enum tree_code code = TREE_CODE (arg);
3071 location_t loc1, loc2;
3073 /* If this is a comparison, we can simply invert it, except for
3074 floating-point non-equality comparisons, in which case we just
3075 enclose a TRUTH_NOT_EXPR around what we have. */
3077 if (TREE_CODE_CLASS (code) == tcc_comparison)
3079 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3080 if (FLOAT_TYPE_P (op_type)
3081 && flag_trapping_math
3082 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3083 && code != NE_EXPR && code != EQ_EXPR)
3084 return NULL_TREE;
3086 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3087 if (code == ERROR_MARK)
3088 return NULL_TREE;
3090 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3091 SET_EXPR_LOCATION (t, loc);
3092 return t;
3095 switch (code)
3097 case INTEGER_CST:
3098 return constant_boolean_node (integer_zerop (arg), type);
3100 case TRUTH_AND_EXPR:
3101 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3102 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3103 if (loc1 == UNKNOWN_LOCATION)
3104 loc1 = loc;
3105 if (loc2 == UNKNOWN_LOCATION)
3106 loc2 = loc;
3107 t = build2 (TRUTH_OR_EXPR, type,
3108 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3109 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3110 break;
3112 case TRUTH_OR_EXPR:
3113 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3114 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3115 if (loc1 == UNKNOWN_LOCATION)
3116 loc1 = loc;
3117 if (loc2 == UNKNOWN_LOCATION)
3118 loc2 = loc;
3119 t = build2 (TRUTH_AND_EXPR, type,
3120 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3121 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3122 break;
3124 case TRUTH_XOR_EXPR:
3125 /* Here we can invert either operand. We invert the first operand
3126 unless the second operand is a TRUTH_NOT_EXPR in which case our
3127 result is the XOR of the first operand with the inside of the
3128 negation of the second operand. */
3130 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3131 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3132 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3133 else
3134 t = build2 (TRUTH_XOR_EXPR, type,
3135 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3136 TREE_OPERAND (arg, 1));
3137 break;
3139 case TRUTH_ANDIF_EXPR:
3140 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3141 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3142 if (loc1 == UNKNOWN_LOCATION)
3143 loc1 = loc;
3144 if (loc2 == UNKNOWN_LOCATION)
3145 loc2 = loc;
3146 t = build2 (TRUTH_ORIF_EXPR, type,
3147 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3148 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3149 break;
3151 case TRUTH_ORIF_EXPR:
3152 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3153 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3154 if (loc1 == UNKNOWN_LOCATION)
3155 loc1 = loc;
3156 if (loc2 == UNKNOWN_LOCATION)
3157 loc2 = loc;
3158 t = build2 (TRUTH_ANDIF_EXPR, type,
3159 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3160 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3161 break;
3163 case TRUTH_NOT_EXPR:
3164 return TREE_OPERAND (arg, 0);
3166 case COND_EXPR:
3168 tree arg1 = TREE_OPERAND (arg, 1);
3169 tree arg2 = TREE_OPERAND (arg, 2);
3171 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3172 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3173 if (loc1 == UNKNOWN_LOCATION)
3174 loc1 = loc;
3175 if (loc2 == UNKNOWN_LOCATION)
3176 loc2 = loc;
3178 /* A COND_EXPR may have a throw as one operand, which
3179 then has void type. Just leave void operands
3180 as they are. */
3181 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3182 VOID_TYPE_P (TREE_TYPE (arg1))
3183 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3184 VOID_TYPE_P (TREE_TYPE (arg2))
3185 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3186 break;
3189 case COMPOUND_EXPR:
3190 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3191 if (loc1 == UNKNOWN_LOCATION)
3192 loc1 = loc;
3193 t = build2 (COMPOUND_EXPR, type,
3194 TREE_OPERAND (arg, 0),
3195 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3196 break;
3198 case NON_LVALUE_EXPR:
3199 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3200 if (loc1 == UNKNOWN_LOCATION)
3201 loc1 = loc;
3202 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3204 CASE_CONVERT:
3205 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3207 t = build1 (TRUTH_NOT_EXPR, type, arg);
3208 break;
3211 /* ... fall through ... */
3213 case FLOAT_EXPR:
3214 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3215 if (loc1 == UNKNOWN_LOCATION)
3216 loc1 = loc;
3217 t = build1 (TREE_CODE (arg), type,
3218 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3219 break;
3221 case BIT_AND_EXPR:
3222 if (!integer_onep (TREE_OPERAND (arg, 1)))
3223 return NULL_TREE;
3224 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3225 break;
3227 case SAVE_EXPR:
3228 t = build1 (TRUTH_NOT_EXPR, type, arg);
3229 break;
3231 case CLEANUP_POINT_EXPR:
3232 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3233 if (loc1 == UNKNOWN_LOCATION)
3234 loc1 = loc;
3235 t = build1 (CLEANUP_POINT_EXPR, type,
3236 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3237 break;
3239 default:
3240 t = NULL_TREE;
3241 break;
3244 if (t)
3245 SET_EXPR_LOCATION (t, loc);
3247 return t;
3250 /* Return a simplified tree node for the truth-negation of ARG. This
3251 never alters ARG itself. We assume that ARG is an operation that
3252 returns a truth value (0 or 1).
3254 FIXME: one would think we would fold the result, but it causes
3255 problems with the dominator optimizer. */
3257 tree
3258 invert_truthvalue_loc (location_t loc, tree arg)
3260 tree tem;
3262 if (TREE_CODE (arg) == ERROR_MARK)
3263 return arg;
3265 tem = fold_truth_not_expr (loc, arg);
3266 if (!tem)
3268 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3269 SET_EXPR_LOCATION (tem, loc);
3272 return tem;
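[Editorial sketch] The TRUTH_AND/TRUTH_OR cases in fold_truth_not_expr are De Morgan's laws with source locations threaded through. Both identities can be checked exhaustively over truth values:

#include <stdio.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      if (!(a && b) != (!a || !b) || !(a || b) != (!a && !b))
        printf ("counterexample at a=%d b=%d\n", a, b);
  printf ("De Morgan identities hold\n");   /* no counterexamples */
  return 0;
}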
3275 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3276 operands are another bit-wise operation with a common input. If so,
3277 distribute the bit operations to save an operation and possibly two if
3278 constants are involved. For example, convert
3279 (A | B) & (A | C) into A | (B & C)
3280 Further simplification will occur if B and C are constants.
3282 If this optimization cannot be done, 0 will be returned. */
3284 static tree
3285 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3286 tree arg0, tree arg1)
3288 tree common;
3289 tree left, right;
3291 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3292 || TREE_CODE (arg0) == code
3293 || (TREE_CODE (arg0) != BIT_AND_EXPR
3294 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3295 return 0;
3297 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3299 common = TREE_OPERAND (arg0, 0);
3300 left = TREE_OPERAND (arg0, 1);
3301 right = TREE_OPERAND (arg1, 1);
3303 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3305 common = TREE_OPERAND (arg0, 0);
3306 left = TREE_OPERAND (arg0, 1);
3307 right = TREE_OPERAND (arg1, 0);
3309 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3311 common = TREE_OPERAND (arg0, 1);
3312 left = TREE_OPERAND (arg0, 0);
3313 right = TREE_OPERAND (arg1, 1);
3315 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3317 common = TREE_OPERAND (arg0, 1);
3318 left = TREE_OPERAND (arg0, 0);
3319 right = TREE_OPERAND (arg1, 0);
3321 else
3322 return 0;
3324 common = fold_convert_loc (loc, type, common);
3325 left = fold_convert_loc (loc, type, left);
3326 right = fold_convert_loc (loc, type, right);
3327 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3328 fold_build2_loc (loc, code, type, left, right));
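[Editorial sketch] The identity behind the rewrite, and its dual with the two operators exchanged, holds bit-by-bit and so can be verified exhaustively on a small word:

#include <stdio.h>

int
main (void)
{
  for (unsigned a = 0; a < 8; a++)
    for (unsigned b = 0; b < 8; b++)
      for (unsigned c = 0; c < 8; c++)
        {
          if (((a | b) & (a | c)) != (a | (b & c)))
            printf ("AND/IOR counterexample: %u %u %u\n", a, b, c);
          if (((a & b) | (a & c)) != (a & (b | c)))
            printf ("IOR/AND counterexample: %u %u %u\n", a, b, c);
        }
  printf ("both distributive identities hold\n");
  return 0;
}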
3331 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3332 with code CODE. This optimization is unsafe. */
3333 static tree
3334 distribute_real_division (location_t loc, enum tree_code code, tree type,
3335 tree arg0, tree arg1)
3337 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3338 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3340 /* (A / C) +- (B / C) -> (A +- B) / C. */
3341 if (mul0 == mul1
3342 && operand_equal_p (TREE_OPERAND (arg0, 1),
3343 TREE_OPERAND (arg1, 1), 0))
3344 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3345 fold_build2_loc (loc, code, type,
3346 TREE_OPERAND (arg0, 0),
3347 TREE_OPERAND (arg1, 0)),
3348 TREE_OPERAND (arg0, 1));
3350 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3351 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3352 TREE_OPERAND (arg1, 0), 0)
3353 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3354 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3356 REAL_VALUE_TYPE r0, r1;
3357 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3358 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3359 if (!mul0)
3360 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3361 if (!mul1)
3362 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3363 real_arithmetic (&r0, code, &r0, &r1);
3364 return fold_build2_loc (loc, MULT_EXPR, type,
3365 TREE_OPERAND (arg0, 0),
3366 build_real (type, r0));
3369 return NULL_TREE;
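[Editorial sketch] "Unsafe" is meant literally: regrouping real divisions changes which intermediate results get rounded, so the rewrite is only valid under flags that permit value-changing FP transformations. A small search that prints the first operand triple where the two groupings disagree in the last ulp:

#include <stdio.h>

int
main (void)
{
  /* Compare A/C + B/C against (A + B)/C over small non-representable
     decimals; any mismatch shows the rewrite is value-changing.  */
  for (int ai = 1; ai < 50; ai++)
    for (int bi = 1; bi < 50; bi++)
      for (int ci = 1; ci < 50; ci++)
        {
          double a = ai * 0.1, b = bi * 0.1, c = ci * 0.1;
          if (a / c + b / c != (a + b) / c)
            {
              printf ("a=%.17g b=%.17g c=%.17g\n", a, b, c);
              printf ("a/c + b/c = %.17g\n(a + b)/c = %.17g\n",
                      a / c + b / c, (a + b) / c);
              return 0;
            }
        }
  printf ("no mismatch in this range\n");
  return 0;
}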
3372 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3373 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3375 static tree
3376 make_bit_field_ref (location_t loc, tree inner, tree type,
3377 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3379 tree result, bftype;
3381 if (bitpos == 0)
3383 tree size = TYPE_SIZE (TREE_TYPE (inner));
3384 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3385 || POINTER_TYPE_P (TREE_TYPE (inner)))
3386 && host_integerp (size, 0)
3387 && tree_low_cst (size, 0) == bitsize)
3388 return fold_convert_loc (loc, type, inner);
3391 bftype = type;
3392 if (TYPE_PRECISION (bftype) != bitsize
3393 || TYPE_UNSIGNED (bftype) == !unsignedp)
3394 bftype = build_nonstandard_integer_type (bitsize, 0);
3396 result = build3 (BIT_FIELD_REF, bftype, inner,
3397 size_int (bitsize), bitsize_int (bitpos));
3398 SET_EXPR_LOCATION (result, loc);
3400 if (bftype != type)
3401 result = fold_convert_loc (loc, type, result);
3403 return result;
3406 /* Optimize a bit-field compare.
3408 There are two cases: the first is a compare against a constant and the
3409 second is a comparison of two items where the fields are at the same
3410 bit position relative to the start of a chunk (byte, halfword, word)
3411 large enough to contain it. In these cases we can avoid the shift
3412 implicit in bitfield extractions.
3414 For constants, we emit a compare of the shifted constant with the
3415 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3416 compared. For two fields at the same position, we do the ANDs with the
3417 similar mask and compare the result of the ANDs.
3419 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3420 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3421 are the left and right operands of the comparison, respectively.
3423 If the optimization described above can be done, we return the resulting
3424 tree. Otherwise we return zero. */
3426 static tree
3427 optimize_bit_field_compare (location_t loc, enum tree_code code,
3428 tree compare_type, tree lhs, tree rhs)
3430 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3431 tree type = TREE_TYPE (lhs);
3432 tree signed_type, unsigned_type;
3433 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3434 enum machine_mode lmode, rmode, nmode;
3435 int lunsignedp, runsignedp;
3436 int lvolatilep = 0, rvolatilep = 0;
3437 tree linner, rinner = NULL_TREE;
3438 tree mask;
3439 tree offset;
3441 /* Get all the information about the extractions being done. If the bit size
3442 is the same as the size of the underlying object, we aren't doing an
3443 extraction at all and so can do nothing. We also don't want to
3444 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3445 then will no longer be able to replace it. */
3446 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3447 &lunsignedp, &lvolatilep, false);
3448 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3449 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3450 return 0;
3452 if (!const_p)
3454 /* If this is not a constant, we can only do something if bit positions,
3455 sizes, and signedness are the same. */
3456 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3457 &runsignedp, &rvolatilep, false);
3459 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3460 || lunsignedp != runsignedp || offset != 0
3461 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3462 return 0;
3465 /* See if we can find a mode to refer to this field. We should be able to,
3466 but fail if we can't. */
3467 nmode = get_best_mode (lbitsize, lbitpos,
3468 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3469 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3470 TYPE_ALIGN (TREE_TYPE (rinner))),
3471 word_mode, lvolatilep || rvolatilep);
3472 if (nmode == VOIDmode)
3473 return 0;
3475 /* Set signed and unsigned types of the precision of this mode for the
3476 shifts below. */
3477 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3478 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3480 /* Compute the bit position and size for the new reference and our offset
3481 within it. If the new reference is the same size as the original, we
3482 won't optimize anything, so return zero. */
3483 nbitsize = GET_MODE_BITSIZE (nmode);
3484 nbitpos = lbitpos & ~ (nbitsize - 1);
3485 lbitpos -= nbitpos;
3486 if (nbitsize == lbitsize)
3487 return 0;
3489 if (BYTES_BIG_ENDIAN)
3490 lbitpos = nbitsize - lbitsize - lbitpos;
3492 /* Make the mask to be used against the extracted field. */
3493 mask = build_int_cst_type (unsigned_type, -1);
3494 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3495 mask = const_binop (RSHIFT_EXPR, mask,
3496 size_int (nbitsize - lbitsize - lbitpos), 0);
3498 if (! const_p)
3499 /* If not comparing with constant, just rework the comparison
3500 and return. */
3501 return fold_build2_loc (loc, code, compare_type,
3502 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3503 make_bit_field_ref (loc, linner,
3504 unsigned_type,
3505 nbitsize, nbitpos,
3506 1),
3507 mask),
3508 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3509 make_bit_field_ref (loc, rinner,
3510 unsigned_type,
3511 nbitsize, nbitpos,
3512 1),
3513 mask));
3515 /* Otherwise, we are handling the constant case. See if the constant is too
3516 big for the field. Warn and return a tree for 0 (false) if so. We do
3517 this not only for its own sake, but to avoid having to test for this
3518 error case below. If we didn't, we might generate wrong code.
3520 For unsigned fields, the constant shifted right by the field length should
3521 be all zero. For signed fields, the high-order bits should agree with
3522 the sign bit. */
3524 if (lunsignedp)
3526 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3527 fold_convert_loc (loc,
3528 unsigned_type, rhs),
3529 size_int (lbitsize), 0)))
3531 warning (0, "comparison is always %d due to width of bit-field",
3532 code == NE_EXPR);
3533 return constant_boolean_node (code == NE_EXPR, compare_type);
3536 else
3538 tree tem = const_binop (RSHIFT_EXPR,
3539 fold_convert_loc (loc, signed_type, rhs),
3540 size_int (lbitsize - 1), 0);
3541 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3543 warning (0, "comparison is always %d due to width of bit-field",
3544 code == NE_EXPR);
3545 return constant_boolean_node (code == NE_EXPR, compare_type);
3549 /* Single-bit compares should always be against zero. */
3550 if (lbitsize == 1 && ! integer_zerop (rhs))
3552 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3553 rhs = build_int_cst (type, 0);
3556 /* Make a new bitfield reference, shift the constant over the
3557 appropriate number of bits and mask it with the computed mask
3558 (in case this was a signed field). If we changed it, make a new one. */
3559 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3560 if (lvolatilep)
3562 TREE_SIDE_EFFECTS (lhs) = 1;
3563 TREE_THIS_VOLATILE (lhs) = 1;
3566 rhs = const_binop (BIT_AND_EXPR,
3567 const_binop (LSHIFT_EXPR,
3568 fold_convert_loc (loc, unsigned_type, rhs),
3569 size_int (lbitpos), 0),
3570 mask, 0);
3572 lhs = build2 (code, compare_type,
3573 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3574 rhs);
3575 SET_EXPR_LOCATION (lhs, loc);
3576 return lhs;
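[Editorial sketch] At the value level, the transformation trades "shift the field out, then compare" for "mask the containing word in place, compare against the pre-shifted constant". A hand-written equivalent for a 3-bit field at bit 4 of a 16-bit word (layout chosen for illustration only):

#include <stdio.h>
#include <stdint.h>

#define POS  4    /* lbitpos: field position within the word */
#define SIZE 3    /* lbitsize: field width in bits           */

/* Field extraction, with its implicit shift.  */
static int
cmp_extract (uint16_t word, unsigned cst)
{
  return ((word >> POS) & ((1u << SIZE) - 1)) == cst;
}

/* The shift-free form built above: AND with the mask moved into place,
   compare against the constant shifted once at compile time.  */
static int
cmp_masked (uint16_t word, unsigned cst)
{
  uint16_t mask = ((1u << SIZE) - 1) << POS;
  return (word & mask) == (uint16_t) (cst << POS);
}

int
main (void)
{
  for (unsigned w = 0; w <= 0xffff; w++)
    for (unsigned c = 0; c < (1u << SIZE); c++)
      if (cmp_extract ((uint16_t) w, c) != cmp_masked ((uint16_t) w, c))
        printf ("mismatch at w=%#x c=%u\n", w, c);
  printf ("both forms agree\n");
  return 0;
}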
3579 /* Subroutine for fold_truthop: decode a field reference.
3581 If EXP is a comparison reference, we return the innermost reference.
3583 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3584 set to the starting bit number.
3586 If the innermost field can be completely contained in a mode-sized
3587 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3589 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3590 otherwise it is not changed.
3592 *PUNSIGNEDP is set to the signedness of the field.
3594 *PMASK is set to the mask used. This is either contained in a
3595 BIT_AND_EXPR or derived from the width of the field.
3597 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3599 Return 0 if this is not a component reference or is one that we can't
3600 do anything with. */
3602 static tree
3603 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3604 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3605 int *punsignedp, int *pvolatilep,
3606 tree *pmask, tree *pand_mask)
3608 tree outer_type = 0;
3609 tree and_mask = 0;
3610 tree mask, inner, offset;
3611 tree unsigned_type;
3612 unsigned int precision;
3614 /* All the optimizations using this function assume integer fields.
3615 There are problems with FP fields since the type_for_size call
3616 below can fail for, e.g., XFmode. */
3617 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3618 return 0;
3620 /* We are interested in the bare arrangement of bits, so strip everything
3621 that doesn't affect the machine mode. However, record the type of the
3622 outermost expression if it may matter below. */
3623 if (CONVERT_EXPR_P (exp)
3624 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3625 outer_type = TREE_TYPE (exp);
3626 STRIP_NOPS (exp);
3628 if (TREE_CODE (exp) == BIT_AND_EXPR)
3630 and_mask = TREE_OPERAND (exp, 1);
3631 exp = TREE_OPERAND (exp, 0);
3632 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3633 if (TREE_CODE (and_mask) != INTEGER_CST)
3634 return 0;
3637 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3638 punsignedp, pvolatilep, false);
3639 if ((inner == exp && and_mask == 0)
3640 || *pbitsize < 0 || offset != 0
3641 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3642 return 0;
3644 /* If the number of bits in the reference is the same as the bitsize of
3645 the outer type, then the outer type gives the signedness. Otherwise
3646 (in case of a small bitfield) the signedness is unchanged. */
3647 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3648 *punsignedp = TYPE_UNSIGNED (outer_type);
3650 /* Compute the mask to access the bitfield. */
3651 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3652 precision = TYPE_PRECISION (unsigned_type);
3654 mask = build_int_cst_type (unsigned_type, -1);
3656 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3657 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3659 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3660 if (and_mask != 0)
3661 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3662 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3664 *pmask = mask;
3665 *pand_mask = and_mask;
3666 return inner;
3669 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3670 bit positions. */
3672 static int
3673 all_ones_mask_p (const_tree mask, int size)
3675 tree type = TREE_TYPE (mask);
3676 unsigned int precision = TYPE_PRECISION (type);
3677 tree tmask;
3679 tmask = build_int_cst_type (signed_type_for (type), -1);
3681 return
3682 tree_int_cst_equal (mask,
3683 const_binop (RSHIFT_EXPR,
3684 const_binop (LSHIFT_EXPR, tmask,
3685 size_int (precision - size),
3686 0),
3687 size_int (precision - size), 0));
3690 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3691 represents the sign bit of EXP's type. If EXP represents a sign
3692 or zero extension, also test VAL against the unextended type.
3693 The return value is the (sub)expression whose sign bit is VAL,
3694 or NULL_TREE otherwise. */
3696 static tree
3697 sign_bit_p (tree exp, const_tree val)
3699 unsigned HOST_WIDE_INT mask_lo, lo;
3700 HOST_WIDE_INT mask_hi, hi;
3701 int width;
3702 tree t;
3704 /* Tree EXP must have an integral type. */
3705 t = TREE_TYPE (exp);
3706 if (! INTEGRAL_TYPE_P (t))
3707 return NULL_TREE;
3709 /* Tree VAL must be an integer constant. */
3710 if (TREE_CODE (val) != INTEGER_CST
3711 || TREE_OVERFLOW (val))
3712 return NULL_TREE;
3714 width = TYPE_PRECISION (t);
3715 if (width > HOST_BITS_PER_WIDE_INT)
3717 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3718 lo = 0;
3720 mask_hi = ((unsigned HOST_WIDE_INT) -1
3721 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3722 mask_lo = -1;
3724 else
3726 hi = 0;
3727 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3729 mask_hi = 0;
3730 mask_lo = ((unsigned HOST_WIDE_INT) -1
3731 >> (HOST_BITS_PER_WIDE_INT - width));
3734 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3735 treat VAL as if it were unsigned. */
3736 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3737 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3738 return exp;
3740 /* Handle extension from a narrower type. */
3741 if (TREE_CODE (exp) == NOP_EXPR
3742 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3743 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3745 return NULL_TREE;
3748 /* Subroutine for fold_truthop: determine if an operand is simple enough
3749 to be evaluated unconditionally. */
3751 static int
3752 simple_operand_p (const_tree exp)
3754 /* Strip any conversions that don't change the machine mode. */
3755 STRIP_NOPS (exp);
3757 return (CONSTANT_CLASS_P (exp)
3758 || TREE_CODE (exp) == SSA_NAME
3759 || (DECL_P (exp)
3760 && ! TREE_ADDRESSABLE (exp)
3761 && ! TREE_THIS_VOLATILE (exp)
3762 && ! DECL_NONLOCAL (exp)
3763 /* Don't regard global variables as simple. They may be
3764 allocated in ways unknown to the compiler (shared memory,
3765 #pragma weak, etc). */
3766 && ! TREE_PUBLIC (exp)
3767 && ! DECL_EXTERNAL (exp)
3768 /* Loading a static variable is unduly expensive, but global
3769 registers aren't expensive. */
3770 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3773 /* The following functions are subroutines to fold_range_test and allow it to
3774 try to change a logical combination of comparisons into a range test.
3776 For example, both
3777 X == 2 || X == 3 || X == 4 || X == 5
3778 and
3779 X >= 2 && X <= 5
3780 are converted to
3781 (unsigned) (X - 2) <= 3
3783 We describe each set of comparisons as being either inside or outside
3784 a range, using a variable named like IN_P, and then describe the
3785 range with a lower and upper bound. If one of the bounds is omitted,
3786 it represents either the highest or lowest value of the type.
3788 In the comments below, we represent a range by two numbers in brackets
3789 preceded by a "+" to designate being inside that range, or a "-" to
3790 designate being outside that range, so the condition can be inverted by
3791 flipping the prefix. An omitted bound is represented by a "-". For
3792 example, "- [-, 10]" means being outside the range starting at the lowest
3793 possible value and ending at 10, in other words, being greater than 10.
3794 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3795 always false.
3797 We set up things so that the missing bounds are handled in a consistent
3798 manner so neither a missing bound nor "true" and "false" need to be
3799 handled using a special case. */
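/* Illustrative standalone sketch of the transformation (hypothetical
   helper name, assuming unsigned int arithmetic; not part of the
   original source):  */

static int
range_test_example (unsigned int x)
{
  /* Both the chain form and the range form...  */
  int chain = (x == 2 || x == 3 || x == 4 || x == 5);
  int range = (x >= 2 && x <= 5);
  /* ...are equivalent to one subtract-and-compare: when x < 2 the
     unsigned subtraction wraps to a huge value, so only 2..5 pass.  */
  int folded = (x - 2u) <= 3u;
  return (chain == folded) && (range == folded);
}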
3801 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3802 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3803 and UPPER1_P are nonzero if the respective argument is an upper bound
3804 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3805 must be specified for a comparison. ARG1 will be converted to ARG0's
3806 type if both are specified. */
3808 static tree
3809 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3810 tree arg1, int upper1_p)
3812 tree tem;
3813 int result;
3814 int sgn0, sgn1;
3816 /* If neither arg represents infinity, do the normal operation.
3817 Else, if not a comparison, return infinity. Else handle the special
3818 comparison rules. Note that most of the cases below won't occur, but
3819 are handled for consistency. */
3821 if (arg0 != 0 && arg1 != 0)
3823 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3824 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3825 STRIP_NOPS (tem);
3826 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3829 if (TREE_CODE_CLASS (code) != tcc_comparison)
3830 return 0;
3832 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3833 for neither. In real maths, we cannot assume open ended ranges are
3834 the same. But, this is computer arithmetic, where numbers are finite.
3835 We can therefore replace a missing upper bound with a value Z greater
3836 than any representable number, and symmetrically for lower bounds.
3837 This permits us to treat unbounded ranges as equal. */
3838 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3839 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3840 switch (code)
3842 case EQ_EXPR:
3843 result = sgn0 == sgn1;
3844 break;
3845 case NE_EXPR:
3846 result = sgn0 != sgn1;
3847 break;
3848 case LT_EXPR:
3849 result = sgn0 < sgn1;
3850 break;
3851 case LE_EXPR:
3852 result = sgn0 <= sgn1;
3853 break;
3854 case GT_EXPR:
3855 result = sgn0 > sgn1;
3856 break;
3857 case GE_EXPR:
3858 result = sgn0 >= sgn1;
3859 break;
3860 default:
3861 gcc_unreachable ();
3864 return constant_boolean_node (result, type);
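/* Worked example of the convention above: with ARG1 omitted as an upper
   bound (SGN1 == 1) and ARG0 a finite constant (SGN0 == 0), LT_EXPR
   yields 0 < 1, i.e. true -- every finite value compares less than a
   missing upper bound, with no special casing needed.  */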
3867 /* Given EXP, a logical expression, set the range it is testing into
3868 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3869 actually being tested. *PLOW and *PHIGH will be made of the same
3870 type as the returned expression. If EXP is not a comparison, we
3871 will most likely not be returning a useful value and range. Set
3872 *STRICT_OVERFLOW_P to true if the return value is only valid
3873 because signed overflow is undefined; otherwise, do not change
3874 *STRICT_OVERFLOW_P. */
3876 tree
3877 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3878 bool *strict_overflow_p)
3880 enum tree_code code;
3881 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3882 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3883 int in_p, n_in_p;
3884 tree low, high, n_low, n_high;
3885 location_t loc = EXPR_LOCATION (exp);
3887 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3888 and see if we can refine the range. Some of the cases below may not
3889 happen, but it doesn't seem worth worrying about this. We "continue"
3890 the outer loop when we've changed something; otherwise we "break"
3891 the switch, which will "break" the while. */
3893 in_p = 0;
3894 low = high = build_int_cst (TREE_TYPE (exp), 0);
3896 while (1)
3898 code = TREE_CODE (exp);
3899 exp_type = TREE_TYPE (exp);
3901 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3903 if (TREE_OPERAND_LENGTH (exp) > 0)
3904 arg0 = TREE_OPERAND (exp, 0);
3905 if (TREE_CODE_CLASS (code) == tcc_comparison
3906 || TREE_CODE_CLASS (code) == tcc_unary
3907 || TREE_CODE_CLASS (code) == tcc_binary)
3908 arg0_type = TREE_TYPE (arg0);
3909 if (TREE_CODE_CLASS (code) == tcc_binary
3910 || TREE_CODE_CLASS (code) == tcc_comparison
3911 || (TREE_CODE_CLASS (code) == tcc_expression
3912 && TREE_OPERAND_LENGTH (exp) > 1))
3913 arg1 = TREE_OPERAND (exp, 1);
3916 switch (code)
3918 case TRUTH_NOT_EXPR:
3919 in_p = ! in_p, exp = arg0;
3920 continue;
3922 case EQ_EXPR: case NE_EXPR:
3923 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3924 /* We can only do something if the range is testing for zero
3925 and if the second operand is an integer constant. Note that
3926 saying something is "in" the range we make is done by
3927 complementing IN_P, since it is set in the initial case of
3928 being not equal to zero; "out" is leaving it alone. */
3929 if (low == 0 || high == 0
3930 || ! integer_zerop (low) || ! integer_zerop (high)
3931 || TREE_CODE (arg1) != INTEGER_CST)
3932 break;
3934 switch (code)
3936 case NE_EXPR: /* - [c, c] */
3937 low = high = arg1;
3938 break;
3939 case EQ_EXPR: /* + [c, c] */
3940 in_p = ! in_p, low = high = arg1;
3941 break;
3942 case GT_EXPR: /* - [-, c] */
3943 low = 0, high = arg1;
3944 break;
3945 case GE_EXPR: /* + [c, -] */
3946 in_p = ! in_p, low = arg1, high = 0;
3947 break;
3948 case LT_EXPR: /* - [c, -] */
3949 low = arg1, high = 0;
3950 break;
3951 case LE_EXPR: /* + [-, c] */
3952 in_p = ! in_p, low = 0, high = arg1;
3953 break;
3954 default:
3955 gcc_unreachable ();
3958 /* If this is an unsigned comparison, we also know that EXP is
3959 greater than or equal to zero. We base the range tests we make
3960 on that fact, so we record it here so we can parse existing
3961 range tests. We test arg0_type since often the return type
3962 of, e.g. EQ_EXPR, is boolean. */
3963 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3965 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3966 in_p, low, high, 1,
3967 build_int_cst (arg0_type, 0),
3968 NULL_TREE))
3969 break;
3971 in_p = n_in_p, low = n_low, high = n_high;
3973 /* If the high bound is missing, but we have a nonzero low
3974 bound, reverse the range so it goes from zero to the low bound
3975 minus 1. */
3976 if (high == 0 && low && ! integer_zerop (low))
3978 in_p = ! in_p;
3979 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3980 integer_one_node, 0);
3981 low = build_int_cst (arg0_type, 0);
3985 exp = arg0;
3986 continue;
3988 case NEGATE_EXPR:
3989 /* (-x) IN [a,b] -> x in [-b, -a] */
3990 n_low = range_binop (MINUS_EXPR, exp_type,
3991 build_int_cst (exp_type, 0),
3992 0, high, 1);
3993 n_high = range_binop (MINUS_EXPR, exp_type,
3994 build_int_cst (exp_type, 0),
3995 0, low, 0);
3996 low = n_low, high = n_high;
3997 exp = arg0;
3998 continue;
4000 case BIT_NOT_EXPR:
4001 /* ~ X -> -X - 1 */
4002 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4003 build_int_cst (exp_type, 1));
4004 SET_EXPR_LOCATION (exp, loc);
4005 continue;
4007 case PLUS_EXPR: case MINUS_EXPR:
4008 if (TREE_CODE (arg1) != INTEGER_CST)
4009 break;
4011 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4012 move a constant to the other side. */
4013 if (!TYPE_UNSIGNED (arg0_type)
4014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4015 break;
4017 /* If EXP is signed, any overflow in the computation is undefined,
4018 so we don't worry about it so long as our computations on
4019 the bounds don't overflow. For unsigned, overflow is defined
4020 and this is exactly the right thing. */
4021 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4022 arg0_type, low, 0, arg1, 0);
4023 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4024 arg0_type, high, 1, arg1, 0);
4025 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4026 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4027 break;
4029 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4030 *strict_overflow_p = true;
4032 /* Check for an unsigned range which has wrapped around the maximum
4033 value thus making n_high < n_low, and normalize it. */
4034 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4036 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4037 integer_one_node, 0);
4038 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4039 integer_one_node, 0);
4041 /* If the range is of the form +/- [ x+1, x ], we won't
4042 be able to normalize it. But then, it represents the
4043 whole range or the empty set, so make it
4044 +/- [ -, - ]. */
4045 if (tree_int_cst_equal (n_low, low)
4046 && tree_int_cst_equal (n_high, high))
4047 low = high = 0;
4048 else
4049 in_p = ! in_p;
4051 else
4052 low = n_low, high = n_high;
4054 exp = arg0;
4055 continue;
4057 CASE_CONVERT: case NON_LVALUE_EXPR:
4058 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4059 break;
4061 if (! INTEGRAL_TYPE_P (arg0_type)
4062 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4063 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4064 break;
4066 n_low = low, n_high = high;
4068 if (n_low != 0)
4069 n_low = fold_convert_loc (loc, arg0_type, n_low);
4071 if (n_high != 0)
4072 n_high = fold_convert_loc (loc, arg0_type, n_high);
4075 /* If we're converting arg0 from an unsigned type to the signed
4076 type of exp, we will be doing the comparison as unsigned.
4077 The tests above have already verified that LOW and HIGH
4078 are both positive.
4080 So we have to ensure that we will handle large unsigned
4081 values the same way that the current signed bounds treat
4082 negative values. */
4084 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4086 tree high_positive;
4087 tree equiv_type;
4088 /* For fixed-point modes, we need to pass the saturating flag
4089 as the 2nd parameter. */
4090 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4091 equiv_type = lang_hooks.types.type_for_mode
4092 (TYPE_MODE (arg0_type),
4093 TYPE_SATURATING (arg0_type));
4094 else
4095 equiv_type = lang_hooks.types.type_for_mode
4096 (TYPE_MODE (arg0_type), 1);
4098 /* A range without an upper bound is, naturally, unbounded.
4099 Since convert would have cropped a very large value, use
4100 the max value for the destination type. */
4101 high_positive
4102 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4103 : TYPE_MAX_VALUE (arg0_type);
4105 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4106 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4107 fold_convert_loc (loc, arg0_type,
4108 high_positive),
4109 build_int_cst (arg0_type, 1));
4111 /* If the low bound is specified, "and" the range with the
4112 range for which the original unsigned value will be
4113 positive. */
4114 if (low != 0)
4116 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4117 1, n_low, n_high, 1,
4118 fold_convert_loc (loc, arg0_type,
4119 integer_zero_node),
4120 high_positive))
4121 break;
4123 in_p = (n_in_p == in_p);
4125 else
4127 /* Otherwise, "or" the range with the range of the input
4128 that will be interpreted as negative. */
4129 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4130 0, n_low, n_high, 1,
4131 fold_convert_loc (loc, arg0_type,
4132 integer_zero_node),
4133 high_positive))
4134 break;
4136 in_p = (in_p != n_in_p);
4140 exp = arg0;
4141 low = n_low, high = n_high;
4142 continue;
4144 default:
4145 break;
4148 break;
4151 /* If EXP is a constant, we can evaluate whether this is true or false. */
4152 if (TREE_CODE (exp) == INTEGER_CST)
4154 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4155 exp, 0, low, 0))
4156 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4157 exp, 1, high, 1)));
4158 low = high = 0;
4159 exp = 0;
4162 *pin_p = in_p, *plow = low, *phigh = high;
4163 return exp;
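/* For example, given EXP = (x > 10) for signed int x, the loop above
   selects the GT_EXPR case, records the range "- [-, 10]" (outside the
   range running from the type minimum to 10), and returns x as the
   expression being tested.  */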
4166 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4167 type, TYPE, return an expression to test if EXP is in (or out of, depending
4168 on IN_P) the range. Return 0 if the test couldn't be created. */
4170 tree
4171 build_range_check (location_t loc, tree type, tree exp, int in_p,
4172 tree low, tree high)
4174 tree etype = TREE_TYPE (exp), value;
4176 #ifdef HAVE_canonicalize_funcptr_for_compare
4177 /* Disable this optimization for function pointer expressions
4178 on targets that require function pointer canonicalization. */
4179 if (HAVE_canonicalize_funcptr_for_compare
4180 && TREE_CODE (etype) == POINTER_TYPE
4181 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4182 return NULL_TREE;
4183 #endif
4185 if (! in_p)
4187 value = build_range_check (loc, type, exp, 1, low, high);
4188 if (value != 0)
4189 return invert_truthvalue_loc (loc, value);
4191 return 0;
4194 if (low == 0 && high == 0)
4195 return build_int_cst (type, 1);
4197 if (low == 0)
4198 return fold_build2_loc (loc, LE_EXPR, type, exp,
4199 fold_convert_loc (loc, etype, high));
4201 if (high == 0)
4202 return fold_build2_loc (loc, GE_EXPR, type, exp,
4203 fold_convert_loc (loc, etype, low));
4205 if (operand_equal_p (low, high, 0))
4206 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4207 fold_convert_loc (loc, etype, low));
4209 if (integer_zerop (low))
4211 if (! TYPE_UNSIGNED (etype))
4213 etype = unsigned_type_for (etype);
4214 high = fold_convert_loc (loc, etype, high);
4215 exp = fold_convert_loc (loc, etype, exp);
4217 return build_range_check (loc, type, exp, 1, 0, high);
4220 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4221 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4223 unsigned HOST_WIDE_INT lo;
4224 HOST_WIDE_INT hi;
4225 int prec;
4227 prec = TYPE_PRECISION (etype);
4228 if (prec <= HOST_BITS_PER_WIDE_INT)
4230 hi = 0;
4231 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4233 else
4235 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4236 lo = (unsigned HOST_WIDE_INT) -1;
4239 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4241 if (TYPE_UNSIGNED (etype))
4243 tree signed_etype = signed_type_for (etype);
4244 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4245 etype
4246 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4247 else
4248 etype = signed_etype;
4249 exp = fold_convert_loc (loc, etype, exp);
4251 return fold_build2_loc (loc, GT_EXPR, type, exp,
4252 build_int_cst (etype, 0));
4256 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4257 This requires wrap-around arithmetic for the type of the expression.
4258 First make sure that arithmetic in this type is valid, then make sure
4259 that it wraps around. */
4260 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4261 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4262 TYPE_UNSIGNED (etype));
4264 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4266 tree utype, minv, maxv;
4268 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4269 for the type in question, as we rely on this here. */
4270 utype = unsigned_type_for (etype);
4271 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4272 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4273 integer_one_node, 1);
4274 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4276 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4277 minv, 1, maxv, 1)))
4278 etype = utype;
4279 else
4280 return 0;
4283 high = fold_convert_loc (loc, etype, high);
4284 low = fold_convert_loc (loc, etype, low);
4285 exp = fold_convert_loc (loc, etype, exp);
4287 value = const_binop (MINUS_EXPR, high, low, 0);
4290 if (POINTER_TYPE_P (etype))
4292 if (value != 0 && !TREE_OVERFLOW (value))
4294 low = fold_convert_loc (loc, sizetype, low);
4295 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4296 return build_range_check (loc, type,
4297 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4298 etype, exp, low),
4299 1, build_int_cst (etype, 0), value);
4301 return 0;
4304 if (value != 0 && !TREE_OVERFLOW (value))
4305 return build_range_check (loc, type,
4306 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4307 1, build_int_cst (etype, 0), value);
4309 return 0;
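/* Illustrative sketch of the special case above (hypothetical helper
   name, assuming 8-bit chars; not part of the original source):  */

static int
range_check_example (unsigned char c)
{
  /* c >= 1 && c <= 127 holds exactly when (signed char) c > 0, since
     values 128..255 map to negative signed chars.  */
  return (signed char) c > 0;
}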
4312 /* Return the predecessor of VAL in its type, handling the infinite case. */
4314 static tree
4315 range_predecessor (tree val)
4317 tree type = TREE_TYPE (val);
4319 if (INTEGRAL_TYPE_P (type)
4320 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4321 return 0;
4322 else
4323 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4326 /* Return the successor of VAL in its type, handling the infinite case. */
4328 static tree
4329 range_successor (tree val)
4331 tree type = TREE_TYPE (val);
4333 if (INTEGRAL_TYPE_P (type)
4334 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4335 return 0;
4336 else
4337 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4340 /* Given two ranges, see if we can merge them into one. Return 1 if we
4341 can, 0 if we can't. Set the output range into the specified parameters. */
4343 bool
4344 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4345 tree high0, int in1_p, tree low1, tree high1)
4347 int no_overlap;
4348 int subset;
4349 int temp;
4350 tree tem;
4351 int in_p;
4352 tree low, high;
4353 int lowequal = ((low0 == 0 && low1 == 0)
4354 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4355 low0, 0, low1, 0)));
4356 int highequal = ((high0 == 0 && high1 == 0)
4357 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4358 high0, 1, high1, 1)));
4360 /* Make range 0 be the range that starts first, or ends last if they
4361 start at the same value. Swap them if that is not the case. */
4362 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4363 low0, 0, low1, 0))
4364 || (lowequal
4365 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4366 high1, 1, high0, 1))))
4368 temp = in0_p, in0_p = in1_p, in1_p = temp;
4369 tem = low0, low0 = low1, low1 = tem;
4370 tem = high0, high0 = high1, high1 = tem;
4373 /* Now flag two cases, whether the ranges are disjoint or whether the
4374 second range is totally subsumed in the first. Note that the tests
4375 below are simplified by the ones above. */
4376 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4377 high0, 1, low1, 0));
4378 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4379 high1, 1, high0, 1));
4381 /* We now have four cases, depending on whether we are including or
4382 excluding the two ranges. */
4383 if (in0_p && in1_p)
4385 /* If they don't overlap, the result is false. If the second range
4386 is a subset it is the result. Otherwise, the range is from the start
4387 of the second to the end of the first. */
4388 if (no_overlap)
4389 in_p = 0, low = high = 0;
4390 else if (subset)
4391 in_p = 1, low = low1, high = high1;
4392 else
4393 in_p = 1, low = low1, high = high0;
4396 else if (in0_p && ! in1_p)
4398 /* If they don't overlap, the result is the first range. If they are
4399 equal, the result is false. If the second range is a subset of the
4400 first, and the ranges begin at the same place, we go from just after
4401 the end of the second range to the end of the first. If the second
4402 range is not a subset of the first, or if it is a subset and both
4403 ranges end at the same place, the range starts at the start of the
4404 first range and ends just before the second range.
4405 Otherwise, we can't describe this as a single range. */
4406 if (no_overlap)
4407 in_p = 1, low = low0, high = high0;
4408 else if (lowequal && highequal)
4409 in_p = 0, low = high = 0;
4410 else if (subset && lowequal)
4412 low = range_successor (high1);
4413 high = high0;
4414 in_p = 1;
4415 if (low == 0)
4417 /* We are in the weird situation where high0 > high1 but
4418 high1 has no successor. Punt. */
4419 return 0;
4422 else if (! subset || highequal)
4424 low = low0;
4425 high = range_predecessor (low1);
4426 in_p = 1;
4427 if (high == 0)
4429 /* low0 < low1 but low1 has no predecessor. Punt. */
4430 return 0;
4433 else
4434 return 0;
4437 else if (! in0_p && in1_p)
4439 /* If they don't overlap, the result is the second range. If the second
4440 is a subset of the first, the result is false. Otherwise,
4441 the range starts just after the first range and ends at the
4442 end of the second. */
4443 if (no_overlap)
4444 in_p = 1, low = low1, high = high1;
4445 else if (subset || highequal)
4446 in_p = 0, low = high = 0;
4447 else
4449 low = range_successor (high0);
4450 high = high1;
4451 in_p = 1;
4452 if (low == 0)
4454 /* high1 > high0 but high0 has no successor. Punt. */
4455 return 0;
4460 else
4462 /* The case where we are excluding both ranges. Here the complex case
4463 is if they don't overlap. In that case, the only time we have a
4464 range is if they are adjacent. If the second is a subset of the
4465 first, the result is the first. Otherwise, the range to exclude
4466 starts at the beginning of the first range and ends at the end of the
4467 second. */
4468 if (no_overlap)
4470 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4471 range_successor (high0),
4472 1, low1, 0)))
4473 in_p = 0, low = low0, high = high1;
4474 else
4476 /* Canonicalize - [min, x] into - [-, x]. */
4477 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4478 switch (TREE_CODE (TREE_TYPE (low0)))
4480 case ENUMERAL_TYPE:
4481 if (TYPE_PRECISION (TREE_TYPE (low0))
4482 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4483 break;
4484 /* FALLTHROUGH */
4485 case INTEGER_TYPE:
4486 if (tree_int_cst_equal (low0,
4487 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4488 low0 = 0;
4489 break;
4490 case POINTER_TYPE:
4491 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4492 && integer_zerop (low0))
4493 low0 = 0;
4494 break;
4495 default:
4496 break;
4499 /* Canonicalize - [x, max] into - [x, -]. */
4500 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4501 switch (TREE_CODE (TREE_TYPE (high1)))
4503 case ENUMERAL_TYPE:
4504 if (TYPE_PRECISION (TREE_TYPE (high1))
4505 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4506 break;
4507 /* FALLTHROUGH */
4508 case INTEGER_TYPE:
4509 if (tree_int_cst_equal (high1,
4510 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4511 high1 = 0;
4512 break;
4513 case POINTER_TYPE:
4514 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4515 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4516 high1, 1,
4517 integer_one_node, 1)))
4518 high1 = 0;
4519 break;
4520 default:
4521 break;
4524 /* The ranges might be also adjacent between the maximum and
4525 minimum values of the given type. For
4526 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4527 return + [x + 1, y - 1]. */
4528 if (low0 == 0 && high1 == 0)
4530 low = range_successor (high0);
4531 high = range_predecessor (low1);
4532 if (low == 0 || high == 0)
4533 return 0;
4535 in_p = 1;
4537 else
4538 return 0;
4541 else if (subset)
4542 in_p = 0, low = low0, high = high0;
4543 else
4544 in_p = 0, low = low0, high = high1;
4547 *pin_p = in_p, *plow = low, *phigh = high;
4548 return 1;
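/* For example, merging the "inside" ranges + [2, 5] and + [4, 9] with
   both IN0_P and IN1_P set (the AND case) yields their overlap,
   + [4, 5]; the OR case is handled by the caller inverting both
   ranges, merging, and inverting the result.  */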
4552 /* Subroutine of fold, looking inside expressions of the form
4553 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4554 of the COND_EXPR. This function is being used also to optimize
4555 A op B ? C : A, by reversing the comparison first.
4557 Return a folded expression whose code is not a COND_EXPR
4558 anymore, or NULL_TREE if no folding opportunity is found. */
4560 static tree
4561 fold_cond_expr_with_comparison (location_t loc, tree type,
4562 tree arg0, tree arg1, tree arg2)
4564 enum tree_code comp_code = TREE_CODE (arg0);
4565 tree arg00 = TREE_OPERAND (arg0, 0);
4566 tree arg01 = TREE_OPERAND (arg0, 1);
4567 tree arg1_type = TREE_TYPE (arg1);
4568 tree tem;
4570 STRIP_NOPS (arg1);
4571 STRIP_NOPS (arg2);
4573 /* If we have A op 0 ? A : -A, consider applying the following
4574 transformations:
4576 A == 0? A : -A same as -A
4577 A != 0? A : -A same as A
4578 A >= 0? A : -A same as abs (A)
4579 A > 0? A : -A same as abs (A)
4580 A <= 0? A : -A same as -abs (A)
4581 A < 0? A : -A same as -abs (A)
4583 None of these transformations work for modes with signed
4584 zeros. If A is +/-0, the first two transformations will
4585 change the sign of the result (from +0 to -0, or vice
4586 versa). The last four will fix the sign of the result,
4587 even though the original expressions could be positive or
4588 negative, depending on the sign of A.
4590 Note that all these transformations are correct if A is
4591 NaN, since the two alternatives (A and -A) are also NaNs. */
4592 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4593 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4594 ? real_zerop (arg01)
4595 : integer_zerop (arg01))
4596 && ((TREE_CODE (arg2) == NEGATE_EXPR
4597 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4598 /* In the case that A is of the form X-Y, '-A' (arg2) may
4599 have already been folded to Y-X, check for that. */
4600 || (TREE_CODE (arg1) == MINUS_EXPR
4601 && TREE_CODE (arg2) == MINUS_EXPR
4602 && operand_equal_p (TREE_OPERAND (arg1, 0),
4603 TREE_OPERAND (arg2, 1), 0)
4604 && operand_equal_p (TREE_OPERAND (arg1, 1),
4605 TREE_OPERAND (arg2, 0), 0))))
4606 switch (comp_code)
4608 case EQ_EXPR:
4609 case UNEQ_EXPR:
4610 tem = fold_convert_loc (loc, arg1_type, arg1);
4611 return pedantic_non_lvalue_loc (loc,
4612 fold_convert_loc (loc, type,
4613 negate_expr (tem)));
4614 case NE_EXPR:
4615 case LTGT_EXPR:
4616 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4617 case UNGE_EXPR:
4618 case UNGT_EXPR:
4619 if (flag_trapping_math)
4620 break;
4621 /* Fall through. */
4622 case GE_EXPR:
4623 case GT_EXPR:
4624 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4625 arg1 = fold_convert_loc (loc, signed_type_for
4626 (TREE_TYPE (arg1)), arg1);
4627 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4628 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4629 case UNLE_EXPR:
4630 case UNLT_EXPR:
4631 if (flag_trapping_math)
4632 break;
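/* Fall through. */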
4633 case LE_EXPR:
4634 case LT_EXPR:
4635 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4636 arg1 = fold_convert_loc (loc, signed_type_for
4637 (TREE_TYPE (arg1)), arg1);
4638 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4639 return negate_expr (fold_convert_loc (loc, type, tem));
4640 default:
4641 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4642 break;
4645 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4646 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4647 both transformations are correct when A is NaN: A != 0
4648 is then true, and A == 0 is false. */
4650 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4651 && integer_zerop (arg01) && integer_zerop (arg2))
4653 if (comp_code == NE_EXPR)
4654 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4655 else if (comp_code == EQ_EXPR)
4656 return build_int_cst (type, 0);
4659 /* Try some transformations of A op B ? A : B.
4661 A == B? A : B same as B
4662 A != B? A : B same as A
4663 A >= B? A : B same as max (A, B)
4664 A > B? A : B same as max (B, A)
4665 A <= B? A : B same as min (A, B)
4666 A < B? A : B same as min (B, A)
4668 As above, these transformations don't work in the presence
4669 of signed zeros. For example, if A and B are zeros of
4670 opposite sign, the first two transformations will change
4671 the sign of the result. In the last four, the original
4672 expressions give different results for (A=+0, B=-0) and
4673 (A=-0, B=+0), but the transformed expressions do not.
4675 The first two transformations are correct if either A or B
4676 is a NaN. In the first transformation, the condition will
4677 be false, and B will indeed be chosen. In the case of the
4678 second transformation, the condition A != B will be true,
4679 and A will be chosen.
4681 The conversions to max() and min() are not correct if B is
4682 a number and A is not. The conditions in the original
4683 expressions will be false, so all four give B. The min()
4684 and max() versions would give a NaN instead. */
4685 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4686 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4687 /* Avoid these transformations if the COND_EXPR may be used
4688 as an lvalue in the C++ front-end. PR c++/19199. */
4689 && (in_gimple_form
4690 || (strcmp (lang_hooks.name, "GNU C++") != 0
4691 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4692 || ! maybe_lvalue_p (arg1)
4693 || ! maybe_lvalue_p (arg2)))
4695 tree comp_op0 = arg00;
4696 tree comp_op1 = arg01;
4697 tree comp_type = TREE_TYPE (comp_op0);
4699 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4700 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4702 comp_type = type;
4703 comp_op0 = arg1;
4704 comp_op1 = arg2;
4707 switch (comp_code)
4709 case EQ_EXPR:
4710 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4711 case NE_EXPR:
4712 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4713 case LE_EXPR:
4714 case LT_EXPR:
4715 case UNLE_EXPR:
4716 case UNLT_EXPR:
4717 /* In C++ a ?: expression can be an lvalue, so put the
4718 operand which will be used if they are equal first
4719 so that we can convert this back to the
4720 corresponding COND_EXPR. */
4721 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4723 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4724 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4725 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4726 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4727 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4728 comp_op1, comp_op0);
4729 return pedantic_non_lvalue_loc (loc,
4730 fold_convert_loc (loc, type, tem));
4732 break;
4733 case GE_EXPR:
4734 case GT_EXPR:
4735 case UNGE_EXPR:
4736 case UNGT_EXPR:
4737 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4739 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4740 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4741 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4742 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4743 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4744 comp_op1, comp_op0);
4745 return pedantic_non_lvalue_loc (loc,
4746 fold_convert_loc (loc, type, tem));
4748 break;
4749 case UNEQ_EXPR:
4750 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4751 return pedantic_non_lvalue_loc (loc,
4752 fold_convert_loc (loc, type, arg2));
4753 break;
4754 case LTGT_EXPR:
4755 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4756 return pedantic_non_lvalue_loc (loc,
4757 fold_convert_loc (loc, type, arg1));
4758 break;
4759 default:
4760 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4761 break;
4765 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4766 we might still be able to simplify this. For example,
4767 if C1 is one less or one more than C2, this might have started
4768 out as a MIN or MAX and been transformed by this function.
4769 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4771 if (INTEGRAL_TYPE_P (type)
4772 && TREE_CODE (arg01) == INTEGER_CST
4773 && TREE_CODE (arg2) == INTEGER_CST)
4774 switch (comp_code)
4776 case EQ_EXPR:
4777 if (TREE_CODE (arg1) == INTEGER_CST)
4778 break;
4779 /* We can replace A with C1 in this case. */
4780 arg1 = fold_convert_loc (loc, type, arg01);
4781 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4783 case LT_EXPR:
4784 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4785 MIN_EXPR, to preserve the signedness of the comparison. */
4786 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4787 OEP_ONLY_CONST)
4788 && operand_equal_p (arg01,
4789 const_binop (PLUS_EXPR, arg2,
4790 build_int_cst (type, 1), 0),
4791 OEP_ONLY_CONST))
4793 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4794 fold_convert_loc (loc, TREE_TYPE (arg00),
4795 arg2));
4796 return pedantic_non_lvalue_loc (loc,
4797 fold_convert_loc (loc, type, tem));
4799 break;
4801 case LE_EXPR:
4802 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4803 as above. */
4804 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4805 OEP_ONLY_CONST)
4806 && operand_equal_p (arg01,
4807 const_binop (MINUS_EXPR, arg2,
4808 build_int_cst (type, 1), 0),
4809 OEP_ONLY_CONST))
4811 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4812 fold_convert_loc (loc, TREE_TYPE (arg00),
4813 arg2));
4814 return pedantic_non_lvalue_loc (loc,
4815 fold_convert_loc (loc, type, tem));
4817 break;
4819 case GT_EXPR:
4820 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4821 MAX_EXPR, to preserve the signedness of the comparison. */
4822 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4823 OEP_ONLY_CONST)
4824 && operand_equal_p (arg01,
4825 const_binop (MINUS_EXPR, arg2,
4826 build_int_cst (type, 1), 0),
4827 OEP_ONLY_CONST))
4829 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4830 fold_convert_loc (loc, TREE_TYPE (arg00),
4831 arg2));
4832 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4834 break;
4836 case GE_EXPR:
4837 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4838 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4839 OEP_ONLY_CONST)
4840 && operand_equal_p (arg01,
4841 const_binop (PLUS_EXPR, arg2,
4842 build_int_cst (type, 1), 0),
4843 OEP_ONLY_CONST))
4845 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4846 fold_convert_loc (loc, TREE_TYPE (arg00),
4847 arg2));
4848 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4850 break;
4851 case NE_EXPR:
4852 break;
4853 default:
4854 gcc_unreachable ();
4857 return NULL_TREE;
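/* Source-level sketch of the tables above (hypothetical helper name;
   integer operands, so signed zeros and NaNs are not an issue):  */

static int
cond_expr_fold_example (int a, int b)
{
  int abs_a  = a > 0 ? a : -a;   /* A > 0 ? A : -A folds to ABS_EXPR <a>.  */
  int min_ab = a < b ? a : b;    /* A < B ? A : B folds to MIN_EXPR <a, b>.  */
  return abs_a + min_ab;
}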
4862 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4863 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4864 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4865 false) >= 2)
4866 #endif
4868 /* OP0 and OP1 are some logical combination of boolean tests (CODE says
4869 which). See if we can merge them into a range test. Return the new tree if so. */
4871 static tree
4872 fold_range_test (location_t loc, enum tree_code code, tree type,
4873 tree op0, tree op1)
4875 int or_op = (code == TRUTH_ORIF_EXPR
4876 || code == TRUTH_OR_EXPR);
4877 int in0_p, in1_p, in_p;
4878 tree low0, low1, low, high0, high1, high;
4879 bool strict_overflow_p = false;
4880 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4881 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4882 tree tem;
4883 const char * const warnmsg = G_("assuming signed overflow does not occur "
4884 "when simplifying range test");
4886 /* If this is an OR operation, invert both sides; we will invert
4887 again at the end. */
4888 if (or_op)
4889 in0_p = ! in0_p, in1_p = ! in1_p;
4891 /* If both expressions are the same, if we can merge the ranges, and we
4892 can build the range test, return it or it inverted. If one of the
4893 ranges is always true or always false, consider it to be the same
4894 expression as the other. */
4895 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4896 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4897 in1_p, low1, high1)
4898 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4899 lhs != 0 ? lhs
4900 : rhs != 0 ? rhs : integer_zero_node,
4901 in_p, low, high))))
4903 if (strict_overflow_p)
4904 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4905 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4908 /* On machines where branches are expensive, if this is a
4909 short-circuited branch and the underlying object on both sides
4910 is the same, make a non-short-circuit operation. */
4911 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4912 && lhs != 0 && rhs != 0
4913 && (code == TRUTH_ANDIF_EXPR
4914 || code == TRUTH_ORIF_EXPR)
4915 && operand_equal_p (lhs, rhs, 0))
4917 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4918 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4919 which cases we can't do this. */
4920 if (simple_operand_p (lhs))
4922 tem = build2 (code == TRUTH_ANDIF_EXPR
4923 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4924 type, op0, op1);
4925 SET_EXPR_LOCATION (tem, loc);
4926 return tem;
4929 else if (lang_hooks.decls.global_bindings_p () == 0
4930 && ! CONTAINS_PLACEHOLDER_P (lhs))
4932 tree common = save_expr (lhs);
4934 if (0 != (lhs = build_range_check (loc, type, common,
4935 or_op ? ! in0_p : in0_p,
4936 low0, high0))
4937 && (0 != (rhs = build_range_check (loc, type, common,
4938 or_op ? ! in1_p : in1_p,
4939 low1, high1))))
4941 if (strict_overflow_p)
4942 fold_overflow_warning (warnmsg,
4943 WARN_STRICT_OVERFLOW_COMPARISON);
4944 tem = build2 (code == TRUTH_ANDIF_EXPR
4945 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4946 type, lhs, rhs);
4947 SET_EXPR_LOCATION (tem, loc);
4948 return tem;
4953 return 0;
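/* For example, for the OR test (x < 2 || x > 5) the code above inverts
   both operands to the ranges + [2, -] and + [-, 5], merges them into
   + [2, 5], builds a check of the form (unsigned) (x - 2) <= 3, and
   then inverts it back, yielding (unsigned) (x - 2) > 3.  */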
4956 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4957 bit value. Arrange things so the extra bits will be set to zero if and
4958 only if C is sign-extended to its full width. If MASK is nonzero,
4959 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4961 static tree
4962 unextend (tree c, int p, int unsignedp, tree mask)
4964 tree type = TREE_TYPE (c);
4965 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4966 tree temp;
4968 if (p == modesize || unsignedp)
4969 return c;
4971 /* We work by getting just the sign bit into the low-order bit, then
4972 into the high-order bit, then sign-extend. We then XOR that value
4973 with C. */
4974 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4975 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4977 /* We must use a signed type in order to get an arithmetic right shift.
4978 However, we must also avoid introducing accidental overflows, so that
4979 a subsequent call to integer_zerop will work. Hence we must
4980 do the type conversion here. At this point, the constant is either
4981 zero or one, and the conversion to a signed type can never overflow.
4982 We could get an overflow if this conversion is done anywhere else. */
4983 if (TYPE_UNSIGNED (type))
4984 temp = fold_convert (signed_type_for (type), temp);
4986 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4987 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4988 if (mask != 0)
4989 temp = const_binop (BIT_AND_EXPR, temp,
4990 fold_convert (TREE_TYPE (c), mask),
4992 /* If necessary, convert the type back to match the type of C. */
4993 if (TYPE_UNSIGNED (type))
4994 temp = fold_convert (type, temp);
4996 return fold_convert (type,
4997 const_binop (BIT_XOR_EXPR, c, temp, 0));
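/* Worked example (illustrative; a 32-bit mode, P == 8, UNSIGNEDP == 0
   and MASK == 0 are assumed): for C == 0x80, TEMP becomes
   ((0x80 >> 7) & 1) << 31 == 0x80000000, and the arithmetic right
   shift by 32 - 8 - 1 == 23 turns that into 0xFFFFFF00; XORing with C
   then yields 0xFFFFFF80, i.e. C with its sign bit propagated through
   the upper bits.  */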
5000 /* Find ways of folding logical expressions of LHS and RHS:
5001 Try to merge two comparisons to the same innermost item.
5002 Look for range tests like "ch >= '0' && ch <= '9'".
5003 Look for combinations of simple terms on machines with expensive branches
5004 and evaluate the RHS unconditionally.
5006 For example, if we have p->a == 2 && p->b == 4 and we can make an
5007 object large enough to span both A and B, we can do this with a comparison
5008 against the object ANDed with a mask.
5010 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5011 operations to do this with one comparison.
5013 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5014 function and the one above.
5016 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5017 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5019 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5020 two operands.
5022 We return the simplified tree or 0 if no optimization is possible. */
5024 static tree
5025 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5026 tree lhs, tree rhs)
5028 /* If this is the "or" of two comparisons, we can do something if
5029 the comparisons are NE_EXPR. If this is the "and", we can do something
5030 if the comparisons are EQ_EXPR. I.e.,
5031 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5033 WANTED_CODE is that comparison code. For single bit fields, we can
5034 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5035 comparison for one-bit fields. */
5037 enum tree_code wanted_code;
5038 enum tree_code lcode, rcode;
5039 tree ll_arg, lr_arg, rl_arg, rr_arg;
5040 tree ll_inner, lr_inner, rl_inner, rr_inner;
5041 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5042 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5043 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5044 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5045 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5046 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5047 enum machine_mode lnmode, rnmode;
5048 tree ll_mask, lr_mask, rl_mask, rr_mask;
5049 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5050 tree l_const, r_const;
5051 tree lntype, rntype, result;
5052 HOST_WIDE_INT first_bit, end_bit;
5053 int volatilep;
5054 tree orig_lhs = lhs, orig_rhs = rhs;
5055 enum tree_code orig_code = code;
5057 /* Start by getting the comparison codes. Fail if anything is volatile.
5058 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5059 it were surrounded with a NE_EXPR. */
5061 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5062 return 0;
5064 lcode = TREE_CODE (lhs);
5065 rcode = TREE_CODE (rhs);
5067 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5069 lhs = build2 (NE_EXPR, truth_type, lhs,
5070 build_int_cst (TREE_TYPE (lhs), 0));
5071 lcode = NE_EXPR;
5074 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5076 rhs = build2 (NE_EXPR, truth_type, rhs,
5077 build_int_cst (TREE_TYPE (rhs), 0));
5078 rcode = NE_EXPR;
5081 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5082 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5083 return 0;
5085 ll_arg = TREE_OPERAND (lhs, 0);
5086 lr_arg = TREE_OPERAND (lhs, 1);
5087 rl_arg = TREE_OPERAND (rhs, 0);
5088 rr_arg = TREE_OPERAND (rhs, 1);
5090 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5091 if (simple_operand_p (ll_arg)
5092 && simple_operand_p (lr_arg))
5094 tree result;
5095 if (operand_equal_p (ll_arg, rl_arg, 0)
5096 && operand_equal_p (lr_arg, rr_arg, 0))
5098 result = combine_comparisons (loc, code, lcode, rcode,
5099 truth_type, ll_arg, lr_arg);
5100 if (result)
5101 return result;
5103 else if (operand_equal_p (ll_arg, rr_arg, 0)
5104 && operand_equal_p (lr_arg, rl_arg, 0))
5106 result = combine_comparisons (loc, code, lcode,
5107 swap_tree_comparison (rcode),
5108 truth_type, ll_arg, lr_arg);
5109 if (result)
5110 return result;
5114 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5115 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5117 /* If the RHS can be evaluated unconditionally and its operands are
5118 simple, it wins to evaluate the RHS unconditionally on machines
5119 with expensive branches. In this case, this isn't a comparison
5120 that can be merged. Avoid doing this if the RHS is a floating-point
5121 comparison since those can trap. */
5123 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5124 false) >= 2
5125 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5126 && simple_operand_p (rl_arg)
5127 && simple_operand_p (rr_arg))
5129 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5130 if (code == TRUTH_OR_EXPR
5131 && lcode == NE_EXPR && integer_zerop (lr_arg)
5132 && rcode == NE_EXPR && integer_zerop (rr_arg)
5133 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5134 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5136 result = build2 (NE_EXPR, truth_type,
5137 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5138 ll_arg, rl_arg),
5139 build_int_cst (TREE_TYPE (ll_arg), 0));
5140 goto fold_truthop_exit;
5143 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5144 if (code == TRUTH_AND_EXPR
5145 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5146 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5147 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5148 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5150 result = build2 (EQ_EXPR, truth_type,
5151 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5152 ll_arg, rl_arg),
5153 build_int_cst (TREE_TYPE (ll_arg), 0));
5154 goto fold_truthop_exit;
5157 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5159 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5161 result = build2 (code, truth_type, lhs, rhs);
5162 goto fold_truthop_exit;
5164 return NULL_TREE;
5168 /* See if the comparisons can be merged. Then get all the parameters for
5169 each side. */
5171 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5172 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5173 return 0;
5175 volatilep = 0;
5176 ll_inner = decode_field_reference (loc, ll_arg,
5177 &ll_bitsize, &ll_bitpos, &ll_mode,
5178 &ll_unsignedp, &volatilep, &ll_mask,
5179 &ll_and_mask);
5180 lr_inner = decode_field_reference (loc, lr_arg,
5181 &lr_bitsize, &lr_bitpos, &lr_mode,
5182 &lr_unsignedp, &volatilep, &lr_mask,
5183 &lr_and_mask);
5184 rl_inner = decode_field_reference (loc, rl_arg,
5185 &rl_bitsize, &rl_bitpos, &rl_mode,
5186 &rl_unsignedp, &volatilep, &rl_mask,
5187 &rl_and_mask);
5188 rr_inner = decode_field_reference (loc, rr_arg,
5189 &rr_bitsize, &rr_bitpos, &rr_mode,
5190 &rr_unsignedp, &volatilep, &rr_mask,
5191 &rr_and_mask);
5193 /* The inner operation on the lhs of each comparison must be the
5194 same if we are to be able to do anything.
5195 Then see if we have constants. If not, the same must be true for
5196 the rhs's. */
5197 if (volatilep || ll_inner == 0 || rl_inner == 0
5198 || ! operand_equal_p (ll_inner, rl_inner, 0))
5199 return 0;
5201 if (TREE_CODE (lr_arg) == INTEGER_CST
5202 && TREE_CODE (rr_arg) == INTEGER_CST)
5203 l_const = lr_arg, r_const = rr_arg;
5204 else if (lr_inner == 0 || rr_inner == 0
5205 || ! operand_equal_p (lr_inner, rr_inner, 0))
5206 return 0;
5207 else
5208 l_const = r_const = 0;
5210 /* If either comparison code is not correct for our logical operation,
5211 fail. However, we can convert a one-bit comparison against zero into
5212 the opposite comparison against that bit being set in the field. */
5214 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5215 if (lcode != wanted_code)
5217 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5219 /* Make the left operand unsigned, since we are only interested
5220 in the value of one bit. Otherwise we are doing the wrong
5221 thing below. */
5222 ll_unsignedp = 1;
5223 l_const = ll_mask;
5225 else
5226 return 0;
5229 /* This is analogous to the code for l_const above. */
5230 if (rcode != wanted_code)
5232 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5234 rl_unsignedp = 1;
5235 r_const = rl_mask;
5237 else
5238 return 0;
5241 /* See if we can find a mode that contains both fields being compared on
5242 the left. If we can't, fail. Otherwise, update all constants and masks
5243 to be relative to a field of that size. */
5244 first_bit = MIN (ll_bitpos, rl_bitpos);
5245 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5246 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5247 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5248 volatilep);
5249 if (lnmode == VOIDmode)
5250 return 0;
5252 lnbitsize = GET_MODE_BITSIZE (lnmode);
5253 lnbitpos = first_bit & ~ (lnbitsize - 1);
5254 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5255 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5257 if (BYTES_BIG_ENDIAN)
5259 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5260 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5263 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5264 size_int (xll_bitpos), 0);
5265 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5266 size_int (xrl_bitpos), 0);
5268 if (l_const)
5270 l_const = fold_convert_loc (loc, lntype, l_const);
5271 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5272 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5273 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5274 fold_build1_loc (loc, BIT_NOT_EXPR,
5275 lntype, ll_mask),
5276 0)))
5278 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5280 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5283 if (r_const)
5285 r_const = fold_convert_loc (loc, lntype, r_const);
5286 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5287 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5288 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5289 fold_build1_loc (loc, BIT_NOT_EXPR,
5290 lntype, rl_mask),
5291 0)))
5293 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5295 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5299 /* If the right sides are not constant, do the same for them. Also,
5300 disallow this optimization if a size or signedness mismatch occurs
5301 between the left and right sides. */
5302 if (l_const == 0)
5304 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5305 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5306 /* Make sure the two fields on the right
5307 correspond to the left without being swapped. */
5308 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5309 return 0;
5311 first_bit = MIN (lr_bitpos, rr_bitpos);
5312 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5313 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5314 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5315 volatilep);
5316 if (rnmode == VOIDmode)
5317 return 0;
5319 rnbitsize = GET_MODE_BITSIZE (rnmode);
5320 rnbitpos = first_bit & ~ (rnbitsize - 1);
5321 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5322 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5324 if (BYTES_BIG_ENDIAN)
5326 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5327 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5330 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5331 rntype, lr_mask),
5332 size_int (xlr_bitpos), 0);
5333 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5334 rntype, rr_mask),
5335 size_int (xrr_bitpos), 0);
5337 /* Make a mask that corresponds to both fields being compared.
5338 Do this for both items being compared. If the operands are the
5339 same size and the bits being compared are in the same position
5340 then we can do this by masking both and comparing the masked
5341 results. */
5342 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5343 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5344 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5346 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5347 ll_unsignedp || rl_unsignedp);
5348 if (! all_ones_mask_p (ll_mask, lnbitsize))
5349 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5351 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5352 lr_unsignedp || rr_unsignedp);
5353 if (! all_ones_mask_p (lr_mask, rnbitsize))
5354 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5356 result = build2 (wanted_code, truth_type, lhs, rhs);
5357 goto fold_truthop_exit;
5360 /* There is still another way we can do something: If both pairs of
5361 fields being compared are adjacent, we may be able to make a wider
5362 field containing them both.
5364 Note that we still must mask the lhs/rhs expressions. Furthermore,
5365 the mask must be shifted to account for the shift done by
5366 make_bit_field_ref. */
5367 if ((ll_bitsize + ll_bitpos == rl_bitpos
5368 && lr_bitsize + lr_bitpos == rr_bitpos)
5369 || (ll_bitpos == rl_bitpos + rl_bitsize
5370 && lr_bitpos == rr_bitpos + rr_bitsize))
5372 tree type;
5374 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5375 ll_bitsize + rl_bitsize,
5376 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5377 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5378 lr_bitsize + rr_bitsize,
5379 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5381 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5382 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5383 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5384 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5386 /* Convert to the smaller type before masking out unwanted bits. */
5387 type = lntype;
5388 if (lntype != rntype)
5390 if (lnbitsize > rnbitsize)
5392 lhs = fold_convert_loc (loc, rntype, lhs);
5393 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5394 type = rntype;
5396 else if (lnbitsize < rnbitsize)
5398 rhs = fold_convert_loc (loc, lntype, rhs);
5399 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5400 type = lntype;
5404 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5405 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5407 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5408 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5410 result = build2 (wanted_code, truth_type, lhs, rhs);
5411 goto fold_truthop_exit;
5414 return 0;
5417 /* Handle the case of comparisons with constants. If there is something in
5418 common between the masks, those bits of the constants must be the same.
5419 If not, the condition is always false. Test for this to avoid generating
5420 incorrect code below. */
5421 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5422 if (! integer_zerop (result)
5423 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5424 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5426 if (wanted_code == NE_EXPR)
5428 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5429 return constant_boolean_node (true, truth_type);
5431 else
5433 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5434 return constant_boolean_node (false, truth_type);
5438 /* Construct the expression we will return. First get the component
5439 reference we will make. Unless the mask is all ones the width of
5440 that field, perform the mask operation. Then compare with the
5441 merged constant. */
5442 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5443 ll_unsignedp || rl_unsignedp);
5445 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5446 if (! all_ones_mask_p (ll_mask, lnbitsize))
5448 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5449 SET_EXPR_LOCATION (result, loc);
5452 result = build2 (wanted_code, truth_type, result,
5453 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5455 fold_truthop_exit:
5456 SET_EXPR_LOCATION (result, loc);
5457 return result;
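/* Source-level sketch of the merging above (hypothetical struct and
   helper names; assumes both bitfields fit in one directly addressable
   unit):  */

struct fold_truthop_example_s { unsigned a : 4; unsigned b : 4; };

static int
fold_truthop_example (struct fold_truthop_example_s *p, int x, int y)
{
  /* Both fields can be fetched with one load and compared against a
     single merged constant.  */
  int fields = (p->a == 2 && p->b == 4);
  /* On targets where branches are expensive, this is evaluated without
     a branch as (x | y) != 0.  */
  int either = ((x != 0) || (y != 0));
  return fields && either;
}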
5460 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5461 constant. */
5463 static tree
5464 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5465 tree op0, tree op1)
5467 tree arg0 = op0;
5468 enum tree_code op_code;
5469 tree comp_const;
5470 tree minmax_const;
5471 int consts_equal, consts_lt;
5472 tree inner;
5474 STRIP_SIGN_NOPS (arg0);
5476 op_code = TREE_CODE (arg0);
5477 minmax_const = TREE_OPERAND (arg0, 1);
5478 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5479 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5480 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5481 inner = TREE_OPERAND (arg0, 0);
5483 /* If something does not permit us to optimize, return NULL_TREE. */
5484 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5485 || TREE_CODE (comp_const) != INTEGER_CST
5486 || TREE_OVERFLOW (comp_const)
5487 || TREE_CODE (minmax_const) != INTEGER_CST
5488 || TREE_OVERFLOW (minmax_const))
5489 return NULL_TREE;
5491 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5492 and GT_EXPR, doing the rest with recursive calls using logical
5493 simplifications. */
5494 switch (code)
5496 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5498 tree tem
5499 = optimize_minmax_comparison (loc,
5500 invert_tree_comparison (code, false),
5501 type, op0, op1);
5502 if (tem)
5503 return invert_truthvalue_loc (loc, tem);
5504 return NULL_TREE;
5507 case GE_EXPR:
5508 return
5509 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5510 optimize_minmax_comparison
5511 (loc, EQ_EXPR, type, arg0, comp_const),
5512 optimize_minmax_comparison
5513 (loc, GT_EXPR, type, arg0, comp_const));
5515 case EQ_EXPR:
5516 if (op_code == MAX_EXPR && consts_equal)
5517 /* MAX (X, 0) == 0 -> X <= 0 */
5518 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5520 else if (op_code == MAX_EXPR && consts_lt)
5521 /* MAX (X, 0) == 5 -> X == 5 */
5522 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5524 else if (op_code == MAX_EXPR)
5525 /* MAX (X, 0) == -1 -> false */
5526 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5528 else if (consts_equal)
5529 /* MIN (X, 0) == 0 -> X >= 0 */
5530 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5532 else if (consts_lt)
5533 /* MIN (X, 0) == 5 -> false */
5534 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5536 else
5537 /* MIN (X, 0) == -1 -> X == -1 */
5538 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5540 case GT_EXPR:
5541 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5542 /* MAX (X, 0) > 0 -> X > 0
5543 MAX (X, 0) > 5 -> X > 5 */
5544 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5546 else if (op_code == MAX_EXPR)
5547 /* MAX (X, 0) > -1 -> true */
5548 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5550 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5551 /* MIN (X, 0) > 0 -> false
5552 MIN (X, 0) > 5 -> false */
5553 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5555 else
5556 /* MIN (X, 0) > -1 -> X > -1 */
5557 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5559 default:
5560 return NULL_TREE;
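/* Illustrative examples of the cases above, for an integer X:

     MAX (X, 2) > 5  ->  X > 5    (2 < 5, so only X can exceed 5)
     MIN (X, 2) > 5  ->  0        (the minimum can never exceed 5)
     MAX (X, 2) < 5  ->  !(MAX (X, 2) >= 5), i.e. !(X == 5 || X > 5),
                         which further folding reduces to X < 5.  */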
5564 /* T is an integer expression that is being multiplied, divided, or taken a
5565 modulus (CODE says which and what kind of divide or modulus) by a
5566 constant C. See if we can eliminate that operation by folding it with
5567 other operations already in T. WIDE_TYPE, if non-null, is a type that
5568 should be used for the computation if wider than our type.
5570 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5571 (X * 2) + (Y * 4). We must, however, be assured that either the original
5572 expression would not overflow or that overflow is undefined for the type
5573 in the language in question.
5575 If we return a non-null expression, it is an equivalent form of the
5576 original computation, but need not be in the original type.
5578 We set *STRICT_OVERFLOW_P to true if the return value depends on
5579 signed overflow being undefined. Otherwise we do not change
5580 *STRICT_OVERFLOW_P. */
5582 static tree
5583 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5584 bool *strict_overflow_p)
5586 /* To avoid exponential search depth, refuse to allow recursion past
5587 three levels. Beyond that (1) it's highly unlikely that we'll find
5588 something interesting and (2) we've probably processed it before
5589 when we built the inner expression. */
5591 static int depth;
5592 tree ret;
5594 if (depth > 3)
5595 return NULL;
5597 depth++;
5598 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5599 depth--;
5601 return ret;
5604 static tree
5605 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5606 bool *strict_overflow_p)
5608 tree type = TREE_TYPE (t);
5609 enum tree_code tcode = TREE_CODE (t);
5610 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5611 > GET_MODE_SIZE (TYPE_MODE (type)))
5612 ? wide_type : type);
5613 tree t1, t2;
5614 int same_p = tcode == code;
5615 tree op0 = NULL_TREE, op1 = NULL_TREE;
5616 bool sub_strict_overflow_p;
5618 /* Don't deal with constants of zero here; they confuse the code below. */
5619 if (integer_zerop (c))
5620 return NULL_TREE;
5622 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5623 op0 = TREE_OPERAND (t, 0);
5625 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5626 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5628 /* Note that we need not handle conditional operations here since fold
5629 already handles those cases. So just do arithmetic here. */
5630 switch (tcode)
5632 case INTEGER_CST:
5633 /* For a constant, we can always simplify if we are a multiply
5634 or (for divide and modulus) if it is a multiple of our constant. */
5635 if (code == MULT_EXPR
5636 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5637 return const_binop (code, fold_convert (ctype, t),
5638 fold_convert (ctype, c), 0);
5639 break;
5641 CASE_CONVERT: case NON_LVALUE_EXPR:
5642 /* If op0 is an expression ... */
5643 if ((COMPARISON_CLASS_P (op0)
5644 || UNARY_CLASS_P (op0)
5645 || BINARY_CLASS_P (op0)
5646 || VL_EXP_CLASS_P (op0)
5647 || EXPRESSION_CLASS_P (op0))
5648 /* ... and has wrapping overflow, and its type is smaller
5649 than ctype, then we cannot pass through as widening. */
5650 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5651 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5652 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5653 && (TYPE_PRECISION (ctype)
5654 > TYPE_PRECISION (TREE_TYPE (op0))))
5655 /* ... or this is a truncation (t is narrower than op0),
5656 then we cannot pass through this narrowing. */
5657 || (TYPE_PRECISION (type)
5658 < TYPE_PRECISION (TREE_TYPE (op0)))
5659 /* ... or signedness changes for division or modulus,
5660 then we cannot pass through this conversion. */
5661 || (code != MULT_EXPR
5662 && (TYPE_UNSIGNED (ctype)
5663 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5664 /* ... or has undefined overflow while the type it is converted to
5665 has not, then we cannot do the operation in the inner type
5666 as that would introduce undefined overflow. */
5667 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5668 && !TYPE_OVERFLOW_UNDEFINED (type))))
5669 break;
5671 /* Pass the constant down and see if we can make a simplification. If
5672 we can, replace this expression with the inner simplification for
5673 possible later conversion to our or some other type. */
5674 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5675 && TREE_CODE (t2) == INTEGER_CST
5676 && !TREE_OVERFLOW (t2)
5677 && (0 != (t1 = extract_muldiv (op0, t2, code,
5678 code == MULT_EXPR
5679 ? ctype : NULL_TREE,
5680 strict_overflow_p))))
5681 return t1;
5682 break;
5684 case ABS_EXPR:
5685 /* If widening the type changes it from signed to unsigned, then we
5686 must avoid building ABS_EXPR itself as unsigned. */
5687 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5689 tree cstype = (*signed_type_for) (ctype);
5690 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5691 != 0)
5693 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5694 return fold_convert (ctype, t1);
5696 break;
5698 /* If the constant is negative, we cannot simplify this. */
5699 if (tree_int_cst_sgn (c) == -1)
5700 break;
5701 /* FALLTHROUGH */
5702 case NEGATE_EXPR:
5703 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5704 != 0)
5705 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5706 break;
5708 case MIN_EXPR: case MAX_EXPR:
5709 /* If widening the type changes the signedness, then we can't perform
5710 this optimization as that changes the result. */
5711 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5712 break;
5714 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5715 sub_strict_overflow_p = false;
5716 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5717 &sub_strict_overflow_p)) != 0
5718 && (t2 = extract_muldiv (op1, c, code, wide_type,
5719 &sub_strict_overflow_p)) != 0)
5721 if (tree_int_cst_sgn (c) < 0)
5722 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5723 if (sub_strict_overflow_p)
5724 *strict_overflow_p = true;
5725 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5726 fold_convert (ctype, t2));
5728 break;
5730 case LSHIFT_EXPR: case RSHIFT_EXPR:
5731 /* If the second operand is constant, this is a multiplication
5732 or floor division, by a power of two, so we can treat it that
5733 way unless the multiplier or divisor overflows. Signed
5734 left-shift overflow is implementation-defined rather than
5735 undefined in C90, so do not convert signed left shift into
5736 multiplication. */
5737 if (TREE_CODE (op1) == INTEGER_CST
5738 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5739 /* const_binop may not detect overflow correctly,
5740 so check for it explicitly here. */
5741 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5742 && TREE_INT_CST_HIGH (op1) == 0
5743 && 0 != (t1 = fold_convert (ctype,
5744 const_binop (LSHIFT_EXPR,
5745 size_one_node,
5746 op1, 0)))
5747 && !TREE_OVERFLOW (t1))
5748 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5749 ? MULT_EXPR : FLOOR_DIV_EXPR,
5750 ctype,
5751 fold_convert (ctype, op0),
5752 t1),
5753 c, code, wide_type, strict_overflow_p);
5754 break;
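/* For instance, with unsigned X the shift rewrite above turns

     ((unsigned int) X << 2) * 3

   into (X * 4) * 3, which the MULT_EXPR case below then associates
   into X * 12 -- a sketch assuming a 32-bit unsigned int, where the
   wrapping multiplications may be combined freely.  */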
5756 case PLUS_EXPR: case MINUS_EXPR:
5757 /* See if we can eliminate the operation on both sides. If we can, we
5758 can return a new PLUS or MINUS. If we can't, the only remaining
5759 cases where we can do anything are if the second operand is a
5760 constant. */
5761 sub_strict_overflow_p = false;
5762 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5763 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5764 if (t1 != 0 && t2 != 0
5765 && (code == MULT_EXPR
5766 /* If not multiplication, we can only do this if both operands
5767 are divisible by c. */
5768 || (multiple_of_p (ctype, op0, c)
5769 && multiple_of_p (ctype, op1, c))))
5771 if (sub_strict_overflow_p)
5772 *strict_overflow_p = true;
5773 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5774 fold_convert (ctype, t2));
5777 /* If this was a subtraction, negate OP1 and set it to be an addition.
5778 This simplifies the logic below. */
5779 if (tcode == MINUS_EXPR)
5781 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5782 /* If OP1 was not easily negatable, the constant may be OP0. */
5783 if (TREE_CODE (op0) == INTEGER_CST)
5785 tree tem = op0;
5786 op0 = op1;
5787 op1 = tem;
5788 tem = t1;
5789 t1 = t2;
5790 t2 = tem;
5794 if (TREE_CODE (op1) != INTEGER_CST)
5795 break;
5797 /* If either OP1 or C is negative, this optimization is not safe for
5798 some of the division and remainder types while for others we need
5799 to change the code. */
5800 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5802 if (code == CEIL_DIV_EXPR)
5803 code = FLOOR_DIV_EXPR;
5804 else if (code == FLOOR_DIV_EXPR)
5805 code = CEIL_DIV_EXPR;
5806 else if (code != MULT_EXPR
5807 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5808 break;
5811 /* If it's a multiply or a division/modulus operation of a multiple
5812 of our constant, do the operation and verify it doesn't overflow. */
5813 if (code == MULT_EXPR
5814 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5816 op1 = const_binop (code, fold_convert (ctype, op1),
5817 fold_convert (ctype, c), 0);
5818 /* We allow the constant to overflow with wrapping semantics. */
5819 if (op1 == 0
5820 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5821 break;
5823 else
5824 break;
5826 /* If we have an unsigned type that is not a sizetype, we cannot widen
5827 the operation since it will change the result if the original
5828 computation overflowed. */
5829 if (TYPE_UNSIGNED (ctype)
5830 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5831 && ctype != type)
5832 break;
5834 /* If we were able to eliminate our operation from the first side,
5835 apply our operation to the second side and reform the PLUS. */
5836 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5837 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5839 /* The last case is if we are a multiply. In that case, we can
5840 apply the distributive law to commute the multiply and addition
5841 if the multiplication of the constants doesn't overflow. */
5842 if (code == MULT_EXPR)
5843 return fold_build2 (tcode, ctype,
5844 fold_build2 (code, ctype,
5845 fold_convert (ctype, op0),
5846 fold_convert (ctype, c)),
5847 op1);
5849 break;
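/* A small worked example of the distributive-law branch above:
   extracting the multiplication in (X + 3) * 2 yields

     (X + 3) * 2  ->  X * 2 + 6

   assuming a type where the multiplication of the constants (3 * 2)
   does not overflow.  */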
5851 case MULT_EXPR:
5852 /* We have a special case here if we are doing something like
5853 (C * 8) % 4 since we know that's zero. */
5854 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5855 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5856 /* If the multiplication can overflow we cannot optimize this.
5857 ??? Until we can properly mark individual operations as
5858 not overflowing we need to treat sizetype specially here, as
5859 stor-layout relies on this optimization to make
5860 DECL_FIELD_BIT_OFFSET always a constant. */
5861 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5862 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5863 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5864 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5865 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5867 *strict_overflow_p = true;
5868 return omit_one_operand (type, integer_zero_node, op0);
5871 /* ... fall through ... */
5873 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5874 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5875 /* If we can extract our operation from the LHS, do so and return a
5876 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5877 do something only if the second operand is a constant. */
5878 if (same_p
5879 && (t1 = extract_muldiv (op0, c, code, wide_type,
5880 strict_overflow_p)) != 0)
5881 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5882 fold_convert (ctype, op1));
5883 else if (tcode == MULT_EXPR && code == MULT_EXPR
5884 && (t1 = extract_muldiv (op1, c, code, wide_type,
5885 strict_overflow_p)) != 0)
5886 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5887 fold_convert (ctype, t1));
5888 else if (TREE_CODE (op1) != INTEGER_CST)
5889 return 0;
5891 /* If these are the same operation types, we can associate them
5892 assuming no overflow. */
5893 if (tcode == code
5894 && 0 != (t1 = int_const_binop (MULT_EXPR,
5895 fold_convert (ctype, op1),
5896 fold_convert (ctype, c), 1))
5897 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5898 TREE_INT_CST_HIGH (t1),
5899 (TYPE_UNSIGNED (ctype)
5900 && tcode != MULT_EXPR) ? -1 : 1,
5901 TREE_OVERFLOW (t1)))
5902 && !TREE_OVERFLOW (t1))
5903 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5905 /* If these operations "cancel" each other, we have the main
5906 optimizations of this pass, which occur when either constant is a
5907 multiple of the other, in which case we replace this with an
5908 operation of either CODE or TCODE.
5910 If we have an unsigned type that is not a sizetype, we cannot do
5911 this since it will change the result if the original computation
5912 overflowed. */
5913 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5914 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5915 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5916 || (tcode == MULT_EXPR
5917 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5918 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5919 && code != MULT_EXPR)))
5921 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5923 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5924 *strict_overflow_p = true;
5925 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5926 fold_convert (ctype,
5927 const_binop (TRUNC_DIV_EXPR,
5928 op1, c, 0)));
5930 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5932 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5933 *strict_overflow_p = true;
5934 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5935 fold_convert (ctype,
5936 const_binop (TRUNC_DIV_EXPR,
5937 c, op1, 0)));
5940 break;
5942 default:
5943 break;
5946 return 0;
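/* Sketches of the "cancel" cases above, for a signed type where
   overflow is undefined (so *strict_overflow_p gets set):

     (X * 6) / 2  ->  X * 3    (6 is a multiple of 2)
     (X * 2) / 6  ->  X / 3    (2 divides 6)
     (X * 8) % 4  ->  0        (handled in the MULT_EXPR case)  */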
5949 /* Return a node which has the indicated constant VALUE (either 0 or
5950 1), and is of the indicated TYPE. */
5952 tree
5953 constant_boolean_node (int value, tree type)
5955 if (type == integer_type_node)
5956 return value ? integer_one_node : integer_zero_node;
5957 else if (type == boolean_type_node)
5958 return value ? boolean_true_node : boolean_false_node;
5959 else
5960 return build_int_cst (type, value);
5964 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5965 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5966 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5967 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5968 COND is the first argument to CODE; otherwise (as in the example
5969 given here), it is the second argument. TYPE is the type of the
5970 original expression. Return NULL_TREE if no simplification is
5971 possible. */
5973 static tree
5974 fold_binary_op_with_conditional_arg (location_t loc,
5975 enum tree_code code,
5976 tree type, tree op0, tree op1,
5977 tree cond, tree arg, int cond_first_p)
5979 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5980 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5981 tree test, true_value, false_value;
5982 tree lhs = NULL_TREE;
5983 tree rhs = NULL_TREE;
5985 if (TREE_CODE (cond) == COND_EXPR)
5987 test = TREE_OPERAND (cond, 0);
5988 true_value = TREE_OPERAND (cond, 1);
5989 false_value = TREE_OPERAND (cond, 2);
5990 /* If this operand throws an exception, then it does not make
5991 sense to try to perform a logical or arithmetic operation
5992 involving it. */
5993 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5994 lhs = true_value;
5995 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5996 rhs = false_value;
5998 else
6000 tree testtype = TREE_TYPE (cond);
6001 test = cond;
6002 true_value = constant_boolean_node (true, testtype);
6003 false_value = constant_boolean_node (false, testtype);
6006 /* This transformation is only worthwhile if we don't have to wrap ARG
6007 in a SAVE_EXPR and the operation can be simplified on at least one
6008 of the branches once it's pushed inside the COND_EXPR. */
6009 if (!TREE_CONSTANT (arg)
6010 && (TREE_SIDE_EFFECTS (arg)
6011 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6012 return NULL_TREE;
6014 arg = fold_convert_loc (loc, arg_type, arg);
6015 if (lhs == 0)
6017 true_value = fold_convert_loc (loc, cond_type, true_value);
6018 if (cond_first_p)
6019 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6020 else
6021 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6023 if (rhs == 0)
6025 false_value = fold_convert_loc (loc, cond_type, false_value);
6026 if (cond_first_p)
6027 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6028 else
6029 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6032 /* Check that we have simplified at least one of the branches. */
6033 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6034 return NULL_TREE;
6036 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
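/* For example, `5 + (b ? 1 : 2)' becomes `b ? 6 : 7' here: both
   branches simplify to constants, so the transformation is deemed
   worthwhile.  */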
6040 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6042 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6043 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6044 ADDEND is the same as X.
6046 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6047 and finite. The problematic cases are when X is zero, and its mode
6048 has signed zeros. In the case of rounding towards -infinity,
6049 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6050 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6052 bool
6053 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6055 if (!real_zerop (addend))
6056 return false;
6058 /* Don't allow the fold with -fsignaling-nans. */
6059 if (HONOR_SNANS (TYPE_MODE (type)))
6060 return false;
6062 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6063 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6064 return true;
6066 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6067 if (TREE_CODE (addend) == REAL_CST
6068 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6069 negate = !negate;
6071 /* The mode has signed zeros, and we have to honor their sign.
6072 In this situation, there is only one case we can return true for.
6073 X - 0 is the same as X unless rounding towards -infinity is
6074 supported. */
6075 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6078 /* Subroutine of fold() that checks comparisons of built-in math
6079 functions against real constants.
6081 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6082 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6083 is the type of the result and ARG0 and ARG1 are the operands of the
6084 comparison. ARG1 must be a TREE_REAL_CST.
6086 The function returns the constant folded tree if a simplification
6087 can be made, and NULL_TREE otherwise. */
6089 static tree
6090 fold_mathfn_compare (location_t loc,
6091 enum built_in_function fcode, enum tree_code code,
6092 tree type, tree arg0, tree arg1)
6094 REAL_VALUE_TYPE c;
6096 if (BUILTIN_SQRT_P (fcode))
6098 tree arg = CALL_EXPR_ARG (arg0, 0);
6099 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6101 c = TREE_REAL_CST (arg1);
6102 if (REAL_VALUE_NEGATIVE (c))
6104 /* sqrt(x) < y is always false, if y is negative. */
6105 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6106 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6108 /* sqrt(x) > y is always true, if y is negative and we
6109 don't care about NaNs, i.e. negative values of x. */
6110 if (code == NE_EXPR || !HONOR_NANS (mode))
6111 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6113 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6114 return fold_build2_loc (loc, GE_EXPR, type, arg,
6115 build_real (TREE_TYPE (arg), dconst0));
6117 else if (code == GT_EXPR || code == GE_EXPR)
6119 REAL_VALUE_TYPE c2;
6121 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6122 real_convert (&c2, mode, &c2);
6124 if (REAL_VALUE_ISINF (c2))
6126 /* sqrt(x) > y is x == +Inf, when y is very large. */
6127 if (HONOR_INFINITIES (mode))
6128 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6129 build_real (TREE_TYPE (arg), c2));
6131 /* sqrt(x) > y is always false, when y is very large
6132 and we don't care about infinities. */
6133 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6136 /* sqrt(x) > c is the same as x > c*c. */
6137 return fold_build2_loc (loc, code, type, arg,
6138 build_real (TREE_TYPE (arg), c2));
6140 else if (code == LT_EXPR || code == LE_EXPR)
6142 REAL_VALUE_TYPE c2;
6144 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6145 real_convert (&c2, mode, &c2);
6147 if (REAL_VALUE_ISINF (c2))
6149 /* sqrt(x) < y is always true, when y is a very large
6150 value and we don't care about NaNs or Infinities. */
6151 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6152 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6154 /* sqrt(x) < y is x != +Inf when y is very large and we
6155 don't care about NaNs. */
6156 if (! HONOR_NANS (mode))
6157 return fold_build2_loc (loc, NE_EXPR, type, arg,
6158 build_real (TREE_TYPE (arg), c2));
6160 /* sqrt(x) < y is x >= 0 when y is very large and we
6161 don't care about Infinities. */
6162 if (! HONOR_INFINITIES (mode))
6163 return fold_build2_loc (loc, GE_EXPR, type, arg,
6164 build_real (TREE_TYPE (arg), dconst0));
6166 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6167 if (lang_hooks.decls.global_bindings_p () != 0
6168 || CONTAINS_PLACEHOLDER_P (arg))
6169 return NULL_TREE;
6171 arg = save_expr (arg);
6172 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6173 fold_build2_loc (loc, GE_EXPR, type, arg,
6174 build_real (TREE_TYPE (arg),
6175 dconst0)),
6176 fold_build2_loc (loc, NE_EXPR, type, arg,
6177 build_real (TREE_TYPE (arg),
6178 c2)));
6181 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6182 if (! HONOR_NANS (mode))
6183 return fold_build2_loc (loc, code, type, arg,
6184 build_real (TREE_TYPE (arg), c2));
6186 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6187 if (lang_hooks.decls.global_bindings_p () == 0
6188 && ! CONTAINS_PLACEHOLDER_P (arg))
6190 arg = save_expr (arg);
6191 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6192 fold_build2_loc (loc, GE_EXPR, type, arg,
6193 build_real (TREE_TYPE (arg),
6194 dconst0)),
6195 fold_build2_loc (loc, code, type, arg,
6196 build_real (TREE_TYPE (arg),
6197 c2)));
6202 return NULL_TREE;
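/* E.g. `sqrt (x) > 2.0' folds to `x > 4.0' above: squaring the
   non-negative, finite bound preserves the comparison, and a NaN
   argument fails both forms.  `sqrt (x) < -1.0' simply folds to 0.  */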
6205 /* Subroutine of fold() that optimizes comparisons against Infinities,
6206 either +Inf or -Inf.
6208 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6209 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6210 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6212 The function returns the constant folded tree if a simplification
6213 can be made, and NULL_TREE otherwise. */
6215 static tree
6216 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6217 tree arg0, tree arg1)
6219 enum machine_mode mode;
6220 REAL_VALUE_TYPE max;
6221 tree temp;
6222 bool neg;
6224 mode = TYPE_MODE (TREE_TYPE (arg0));
6226 /* For negative infinity swap the sense of the comparison. */
6227 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6228 if (neg)
6229 code = swap_tree_comparison (code);
6231 switch (code)
6233 case GT_EXPR:
6234 /* x > +Inf is always false, if we ignore sNaNs. */
6235 if (HONOR_SNANS (mode))
6236 return NULL_TREE;
6237 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6239 case LE_EXPR:
6240 /* x <= +Inf is always true, if we don't care about NaNs. */
6241 if (! HONOR_NANS (mode))
6242 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6244 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6245 if (lang_hooks.decls.global_bindings_p () == 0
6246 && ! CONTAINS_PLACEHOLDER_P (arg0))
6248 arg0 = save_expr (arg0);
6249 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6251 break;
6253 case EQ_EXPR:
6254 case GE_EXPR:
6255 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6256 real_maxval (&max, neg, mode);
6257 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6258 arg0, build_real (TREE_TYPE (arg0), max));
6260 case LT_EXPR:
6261 /* x < +Inf is always equal to x <= DBL_MAX. */
6262 real_maxval (&max, neg, mode);
6263 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6264 arg0, build_real (TREE_TYPE (arg0), max));
6266 case NE_EXPR:
6267 /* x != +Inf is always equal to !(x > DBL_MAX). */
6268 real_maxval (&max, neg, mode);
6269 if (! HONOR_NANS (mode))
6270 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6271 arg0, build_real (TREE_TYPE (arg0), max));
6273 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6274 arg0, build_real (TREE_TYPE (arg0), max));
6275 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6277 default:
6278 break;
6281 return NULL_TREE;
6284 /* Subroutine of fold() that optimizes comparisons of a division by
6285 a nonzero integer constant against an integer constant, i.e.
6286 X/C1 op C2.
6288 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6289 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6290 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6292 The function returns the constant folded tree if a simplification
6293 can be made, and NULL_TREE otherwise. */
6295 static tree
6296 fold_div_compare (location_t loc,
6297 enum tree_code code, tree type, tree arg0, tree arg1)
6299 tree prod, tmp, hi, lo;
6300 tree arg00 = TREE_OPERAND (arg0, 0);
6301 tree arg01 = TREE_OPERAND (arg0, 1);
6302 unsigned HOST_WIDE_INT lpart;
6303 HOST_WIDE_INT hpart;
6304 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6305 bool neg_overflow;
6306 int overflow;
6308 /* We have to do this the hard way to detect unsigned overflow.
6309 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6310 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6311 TREE_INT_CST_HIGH (arg01),
6312 TREE_INT_CST_LOW (arg1),
6313 TREE_INT_CST_HIGH (arg1),
6314 &lpart, &hpart, unsigned_p);
6315 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6316 -1, overflow);
6317 neg_overflow = false;
6319 if (unsigned_p)
6321 tmp = int_const_binop (MINUS_EXPR, arg01,
6322 build_int_cst (TREE_TYPE (arg01), 1), 0);
6323 lo = prod;
6325 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6326 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6327 TREE_INT_CST_HIGH (prod),
6328 TREE_INT_CST_LOW (tmp),
6329 TREE_INT_CST_HIGH (tmp),
6330 &lpart, &hpart, unsigned_p);
6331 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6332 -1, overflow | TREE_OVERFLOW (prod));
6334 else if (tree_int_cst_sgn (arg01) >= 0)
6336 tmp = int_const_binop (MINUS_EXPR, arg01,
6337 build_int_cst (TREE_TYPE (arg01), 1), 0);
6338 switch (tree_int_cst_sgn (arg1))
6340 case -1:
6341 neg_overflow = true;
6342 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6343 hi = prod;
6344 break;
6346 case 0:
6347 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6348 hi = tmp;
6349 break;
6351 case 1:
6352 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6353 lo = prod;
6354 break;
6356 default:
6357 gcc_unreachable ();
6360 else
6362 /* A negative divisor reverses the relational operators. */
6363 code = swap_tree_comparison (code);
6365 tmp = int_const_binop (PLUS_EXPR, arg01,
6366 build_int_cst (TREE_TYPE (arg01), 1), 0);
6367 switch (tree_int_cst_sgn (arg1))
6369 case -1:
6370 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6371 lo = prod;
6372 break;
6374 case 0:
6375 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6376 lo = tmp;
6377 break;
6379 case 1:
6380 neg_overflow = true;
6381 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6382 hi = prod;
6383 break;
6385 default:
6386 gcc_unreachable ();
6390 switch (code)
6392 case EQ_EXPR:
6393 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6394 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6395 if (TREE_OVERFLOW (hi))
6396 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6397 if (TREE_OVERFLOW (lo))
6398 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6399 return build_range_check (loc, type, arg00, 1, lo, hi);
6401 case NE_EXPR:
6402 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6403 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6404 if (TREE_OVERFLOW (hi))
6405 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6406 if (TREE_OVERFLOW (lo))
6407 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6408 return build_range_check (loc, type, arg00, 0, lo, hi);
6410 case LT_EXPR:
6411 if (TREE_OVERFLOW (lo))
6413 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6414 return omit_one_operand_loc (loc, type, tmp, arg00);
6416 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6418 case LE_EXPR:
6419 if (TREE_OVERFLOW (hi))
6421 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6422 return omit_one_operand_loc (loc, type, tmp, arg00);
6424 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6426 case GT_EXPR:
6427 if (TREE_OVERFLOW (hi))
6429 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6430 return omit_one_operand_loc (loc, type, tmp, arg00);
6432 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6434 case GE_EXPR:
6435 if (TREE_OVERFLOW (lo))
6437 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6438 return omit_one_operand_loc (loc, type, tmp, arg00);
6440 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6442 default:
6443 break;
6446 return NULL_TREE;
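/* A worked instance of the ranges above, for unsigned X:

     X / 4 == 3   ->  12 <= X && X <= 15   (lo = 12, hi = 12 + 3)
     X / 4 <  3   ->  X < 12

   The divisor-minus-one slack (tmp) accounts for truncating
   division.  */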
6450 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6451 equality/inequality test, then return a simplified form of the test
6452 using a sign test. Otherwise return NULL. TYPE is the desired
6453 result type. */
6455 static tree
6456 fold_single_bit_test_into_sign_test (location_t loc,
6457 enum tree_code code, tree arg0, tree arg1,
6458 tree result_type)
6460 /* If this is testing a single bit, we can optimize the test. */
6461 if ((code == NE_EXPR || code == EQ_EXPR)
6462 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6463 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6465 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6466 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6467 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6469 if (arg00 != NULL_TREE
6470 /* This is only a win if casting to a signed type is cheap,
6471 i.e. when arg00's type is not a partial mode. */
6472 && TYPE_PRECISION (TREE_TYPE (arg00))
6473 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6475 tree stype = signed_type_for (TREE_TYPE (arg00));
6476 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6477 result_type,
6478 fold_convert_loc (loc, stype, arg00),
6479 build_int_cst (stype, 0));
6483 return NULL_TREE;
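/* For instance, assuming a 32-bit unsigned int X, the test

     (X & 0x80000000) != 0

   isolates the sign bit and is rewritten above as `(int) X < 0';
   the == 0 form becomes `(int) X >= 0'.  */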
6486 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6487 equality/inequality test, then return a simplified form of
6488 the test using shifts and logical operations. Otherwise return
6489 NULL. TYPE is the desired result type. */
6491 tree
6492 fold_single_bit_test (location_t loc, enum tree_code code,
6493 tree arg0, tree arg1, tree result_type)
6495 /* If this is testing a single bit, we can optimize the test. */
6496 if ((code == NE_EXPR || code == EQ_EXPR)
6497 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6498 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6500 tree inner = TREE_OPERAND (arg0, 0);
6501 tree type = TREE_TYPE (arg0);
6502 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6503 enum machine_mode operand_mode = TYPE_MODE (type);
6504 int ops_unsigned;
6505 tree signed_type, unsigned_type, intermediate_type;
6506 tree tem, one;
6508 /* First, see if we can fold the single bit test into a sign-bit
6509 test. */
6510 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6511 result_type);
6512 if (tem)
6513 return tem;
6515 /* Otherwise we have (A & C) != 0 where C is a single bit,
6516 convert that into ((A >> C2) & 1), where C2 = log2(C).
6517 Similarly for (A & C) == 0. */
6519 /* If INNER is a right shift of a constant and it plus BITNUM does
6520 not overflow, adjust BITNUM and INNER. */
6521 if (TREE_CODE (inner) == RSHIFT_EXPR
6522 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6523 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6524 && bitnum < TYPE_PRECISION (type)
6525 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6526 bitnum - TYPE_PRECISION (type)))
6528 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6529 inner = TREE_OPERAND (inner, 0);
6532 /* If we are going to be able to omit the AND below, we must do our
6533 operations as unsigned. If we must use the AND, we have a choice.
6534 Normally unsigned is faster, but for some machines signed is. */
6535 #ifdef LOAD_EXTEND_OP
6536 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6537 && !flag_syntax_only) ? 0 : 1;
6538 #else
6539 ops_unsigned = 1;
6540 #endif
6542 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6543 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6544 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6545 inner = fold_convert_loc (loc, intermediate_type, inner);
6547 if (bitnum != 0)
6548 inner = build2 (RSHIFT_EXPR, intermediate_type,
6549 inner, size_int (bitnum));
6551 one = build_int_cst (intermediate_type, 1);
6553 if (code == EQ_EXPR)
6554 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6556 /* Put the AND last so it can combine with more things. */
6557 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6559 /* Make sure to return the proper type. */
6560 inner = fold_convert_loc (loc, result_type, inner);
6562 return inner;
6564 return NULL_TREE;
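/* E.g. `(X & 8) != 0' becomes `(X >> 3) & 1' here, and the == 0 form
   gains the XOR: `((X >> 3) ^ 1) & 1'.  The conversions to
   INTERMEDIATE_TYPE are omitted in this sketch.  */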
6567 /* Check whether we are allowed to reorder operands arg0 and arg1,
6568 such that the evaluation of arg1 occurs before arg0. */
6570 static bool
6571 reorder_operands_p (const_tree arg0, const_tree arg1)
6573 if (! flag_evaluation_order)
6574 return true;
6575 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6576 return true;
6577 return ! TREE_SIDE_EFFECTS (arg0)
6578 && ! TREE_SIDE_EFFECTS (arg1);
6581 /* Test whether it is preferable to swap two operands, ARG0 and
6582 ARG1, for example because ARG0 is an integer constant and ARG1
6583 isn't. If REORDER is true, only recommend swapping if we can
6584 evaluate the operands in reverse order. */
6586 bool
6587 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6589 STRIP_SIGN_NOPS (arg0);
6590 STRIP_SIGN_NOPS (arg1);
6592 if (TREE_CODE (arg1) == INTEGER_CST)
6593 return 0;
6594 if (TREE_CODE (arg0) == INTEGER_CST)
6595 return 1;
6597 if (TREE_CODE (arg1) == REAL_CST)
6598 return 0;
6599 if (TREE_CODE (arg0) == REAL_CST)
6600 return 1;
6602 if (TREE_CODE (arg1) == FIXED_CST)
6603 return 0;
6604 if (TREE_CODE (arg0) == FIXED_CST)
6605 return 1;
6607 if (TREE_CODE (arg1) == COMPLEX_CST)
6608 return 0;
6609 if (TREE_CODE (arg0) == COMPLEX_CST)
6610 return 1;
6612 if (TREE_CONSTANT (arg1))
6613 return 0;
6614 if (TREE_CONSTANT (arg0))
6615 return 1;
6617 if (optimize_function_for_size_p (cfun))
6618 return 0;
6620 if (reorder && flag_evaluation_order
6621 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6622 return 0;
6624 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6625 for commutative and comparison operators. Ensuring a canonical
6626 form allows the optimizers to find additional redundancies without
6627 having to explicitly check for both orderings. */
6628 if (TREE_CODE (arg0) == SSA_NAME
6629 && TREE_CODE (arg1) == SSA_NAME
6630 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6631 return 1;
6633 /* Put SSA_NAMEs last. */
6634 if (TREE_CODE (arg1) == SSA_NAME)
6635 return 0;
6636 if (TREE_CODE (arg0) == SSA_NAME)
6637 return 1;
6639 /* Put variables last. */
6640 if (DECL_P (arg1))
6641 return 0;
6642 if (DECL_P (arg0))
6643 return 1;
6645 return 0;
6648 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6649 ARG0 is extended to a wider type. */
6651 static tree
6652 fold_widened_comparison (location_t loc, enum tree_code code,
6653 tree type, tree arg0, tree arg1)
6655 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6656 tree arg1_unw;
6657 tree shorter_type, outer_type;
6658 tree min, max;
6659 bool above, below;
6661 if (arg0_unw == arg0)
6662 return NULL_TREE;
6663 shorter_type = TREE_TYPE (arg0_unw);
6665 #ifdef HAVE_canonicalize_funcptr_for_compare
6666 /* Disable this optimization if we're casting a function pointer
6667 type on targets that require function pointer canonicalization. */
6668 if (HAVE_canonicalize_funcptr_for_compare
6669 && TREE_CODE (shorter_type) == POINTER_TYPE
6670 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6671 return NULL_TREE;
6672 #endif
6674 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6675 return NULL_TREE;
6677 arg1_unw = get_unwidened (arg1, NULL_TREE);
6679 /* If possible, express the comparison in the shorter mode. */
6680 if ((code == EQ_EXPR || code == NE_EXPR
6681 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6682 && (TREE_TYPE (arg1_unw) == shorter_type
6683 || ((TYPE_PRECISION (shorter_type)
6684 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6685 && (TYPE_UNSIGNED (shorter_type)
6686 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6687 || (TREE_CODE (arg1_unw) == INTEGER_CST
6688 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6689 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6690 && int_fits_type_p (arg1_unw, shorter_type))))
6691 return fold_build2_loc (loc, code, type, arg0_unw,
6692 fold_convert_loc (loc, shorter_type, arg1_unw));
6694 if (TREE_CODE (arg1_unw) != INTEGER_CST
6695 || TREE_CODE (shorter_type) != INTEGER_TYPE
6696 || !int_fits_type_p (arg1_unw, shorter_type))
6697 return NULL_TREE;
6699 /* If we are comparing with an integer that does not fit into the range
6700 of the shorter type, the result is known. */
6701 outer_type = TREE_TYPE (arg1_unw);
6702 min = lower_bound_in_type (outer_type, shorter_type);
6703 max = upper_bound_in_type (outer_type, shorter_type);
6705 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6706 max, arg1_unw));
6707 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6708 arg1_unw, min));
6710 switch (code)
6712 case EQ_EXPR:
6713 if (above || below)
6714 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6715 break;
6717 case NE_EXPR:
6718 if (above || below)
6719 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6720 break;
6722 case LT_EXPR:
6723 case LE_EXPR:
6724 if (above)
6725 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6726 else if (below)
6727 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6729 case GT_EXPR:
6730 case GE_EXPR:
6731 if (above)
6732 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6733 else if (below)
6734 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6736 default:
6737 break;
6740 return NULL_TREE;
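/* As an example, let UC be an unsigned char.  Then

     (int) UC == 300   ->  0   (300 is above the range [0, 255])
     (int) UC <  300   ->  1

   while `(int) UC == 200' is narrowed to the unsigned char
   comparison `UC == 200'.  */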
6743 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6744 ARG0 just the signedness is changed. */
6746 static tree
6747 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6748 tree arg0, tree arg1)
6750 tree arg0_inner;
6751 tree inner_type, outer_type;
6753 if (!CONVERT_EXPR_P (arg0))
6754 return NULL_TREE;
6756 outer_type = TREE_TYPE (arg0);
6757 arg0_inner = TREE_OPERAND (arg0, 0);
6758 inner_type = TREE_TYPE (arg0_inner);
6760 #ifdef HAVE_canonicalize_funcptr_for_compare
6761 /* Disable this optimization if we're casting a function pointer
6762 type on targets that require function pointer canonicalization. */
6763 if (HAVE_canonicalize_funcptr_for_compare
6764 && TREE_CODE (inner_type) == POINTER_TYPE
6765 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6766 return NULL_TREE;
6767 #endif
6769 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6770 return NULL_TREE;
6772 if (TREE_CODE (arg1) != INTEGER_CST
6773 && !(CONVERT_EXPR_P (arg1)
6774 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6775 return NULL_TREE;
6777 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6778 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6779 && code != NE_EXPR
6780 && code != EQ_EXPR)
6781 return NULL_TREE;
6783 if (TREE_CODE (arg1) == INTEGER_CST)
6784 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6785 TREE_INT_CST_HIGH (arg1), 0,
6786 TREE_OVERFLOW (arg1));
6787 else
6788 arg1 = fold_convert_loc (loc, inner_type, arg1);
6790 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
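/* For example, with U an unsigned int (same precision as int),
   `(int) U == 5' is rewritten as `U == 5U' here; the constant is
   refitted into the inner type by force_fit_type_double above.  */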
6793 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6794 the step of the array. Reconstructs s and delta in the case of s *
6795 delta being an integer constant (and thus already folded). ADDR is
6796 the address. OP1 is the multiplicative expression. If the
6797 function succeeds, the new address expression is returned.
6798 Otherwise NULL_TREE is returned. LOC is the location of the
6799 resulting expression. */
6801 static tree
6802 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6804 tree s, delta, step;
6805 tree ref = TREE_OPERAND (addr, 0), pref;
6806 tree ret, pos;
6807 tree itype;
6808 bool mdim = false;
6810 /* Strip the nops that might be added when converting op1 to sizetype. */
6811 STRIP_NOPS (op1);
6813 /* Canonicalize op1 into a possibly non-constant delta
6814 and an INTEGER_CST s. */
6815 if (TREE_CODE (op1) == MULT_EXPR)
6817 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6819 STRIP_NOPS (arg0);
6820 STRIP_NOPS (arg1);
6822 if (TREE_CODE (arg0) == INTEGER_CST)
6824 s = arg0;
6825 delta = arg1;
6827 else if (TREE_CODE (arg1) == INTEGER_CST)
6829 s = arg1;
6830 delta = arg0;
6832 else
6833 return NULL_TREE;
6835 else if (TREE_CODE (op1) == INTEGER_CST)
6837 delta = op1;
6838 s = NULL_TREE;
6840 else
6842 /* Pretend that op1 is delta * 1. */
6843 delta = op1;
6844 s = integer_one_node;
6847 for (;; ref = TREE_OPERAND (ref, 0))
6849 if (TREE_CODE (ref) == ARRAY_REF)
6851 tree domain;
6853 /* Remember if this was a multi-dimensional array. */
6854 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6855 mdim = true;
6857 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6858 if (! domain)
6859 continue;
6860 itype = TREE_TYPE (domain);
6862 step = array_ref_element_size (ref);
6863 if (TREE_CODE (step) != INTEGER_CST)
6864 continue;
6866 if (s)
6868 if (! tree_int_cst_equal (step, s))
6869 continue;
6871 else
6873 /* Check whether delta is a multiple of the step. */
6874 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6875 if (! tmp)
6876 continue;
6877 delta = tmp;
6880 /* Only fold here if we can verify we do not overflow one
6881 dimension of a multi-dimensional array. */
6882 if (mdim)
6884 tree tmp;
6886 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6887 || !TYPE_MAX_VALUE (domain)
6888 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6889 continue;
6891 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6892 fold_convert_loc (loc, itype,
6893 TREE_OPERAND (ref, 1)),
6894 fold_convert_loc (loc, itype, delta));
6895 if (!tmp
6896 || TREE_CODE (tmp) != INTEGER_CST
6897 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6898 continue;
6901 break;
6903 else
6904 mdim = false;
6906 if (!handled_component_p (ref))
6907 return NULL_TREE;
6910 /* We found a suitable array reference. So copy everything up to it,
6911 and replace the index. */
6913 pref = TREE_OPERAND (addr, 0);
6914 ret = copy_node (pref);
6915 SET_EXPR_LOCATION (ret, loc);
6916 pos = ret;
6918 while (pref != ref)
6920 pref = TREE_OPERAND (pref, 0);
6921 TREE_OPERAND (pos, 0) = copy_node (pref);
6922 pos = TREE_OPERAND (pos, 0);
6925 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6926 fold_convert_loc (loc, itype,
6927 TREE_OPERAND (pos, 1)),
6928 fold_convert_loc (loc, itype, delta));
6930 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
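/* A sketch of the intended effect, assuming 4-byte ints and
   `int a[N]': for `&a[i] p+ 4 * d', the step (4) matches the array
   element size, so the result is `&a[i + d]'.  */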
6934 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6935 means A >= Y && A != MAX, but in this case we know that
6936 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6938 static tree
6939 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6941 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6943 if (TREE_CODE (bound) == LT_EXPR)
6944 a = TREE_OPERAND (bound, 0);
6945 else if (TREE_CODE (bound) == GT_EXPR)
6946 a = TREE_OPERAND (bound, 1);
6947 else
6948 return NULL_TREE;
6950 typea = TREE_TYPE (a);
6951 if (!INTEGRAL_TYPE_P (typea)
6952 && !POINTER_TYPE_P (typea))
6953 return NULL_TREE;
6955 if (TREE_CODE (ineq) == LT_EXPR)
6957 a1 = TREE_OPERAND (ineq, 1);
6958 y = TREE_OPERAND (ineq, 0);
6960 else if (TREE_CODE (ineq) == GT_EXPR)
6962 a1 = TREE_OPERAND (ineq, 0);
6963 y = TREE_OPERAND (ineq, 1);
6965 else
6966 return NULL_TREE;
6968 if (TREE_TYPE (a1) != typea)
6969 return NULL_TREE;
6971 if (POINTER_TYPE_P (typea))
6973 /* Convert the pointers to integers before taking the difference. */
6974 tree ta = fold_convert_loc (loc, ssizetype, a);
6975 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6976 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6978 else
6979 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6981 if (!diff || !integer_onep (diff))
6982 return NULL_TREE;
6984 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6987 /* Fold a sum or difference of at least one multiplication.
6988 Returns the folded tree or NULL if no simplification could be made. */
6990 static tree
6991 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6992 tree arg0, tree arg1)
6994 tree arg00, arg01, arg10, arg11;
6995 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6997 /* (A * C) +- (B * C) -> (A+-B) * C.
6998 (A * C) +- A -> A * (C+-1).
6999 We are most concerned about the case where C is a constant,
7000 but other combinations show up during loop reduction. Since
7001 it is not difficult, try all four possibilities. */
7003 if (TREE_CODE (arg0) == MULT_EXPR)
7005 arg00 = TREE_OPERAND (arg0, 0);
7006 arg01 = TREE_OPERAND (arg0, 1);
7008 else if (TREE_CODE (arg0) == INTEGER_CST)
7010 arg00 = build_one_cst (type);
7011 arg01 = arg0;
7013 else
7015 /* We cannot generate constant 1 for fract. */
7016 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7017 return NULL_TREE;
7018 arg00 = arg0;
7019 arg01 = build_one_cst (type);
7021 if (TREE_CODE (arg1) == MULT_EXPR)
7023 arg10 = TREE_OPERAND (arg1, 0);
7024 arg11 = TREE_OPERAND (arg1, 1);
7026 else if (TREE_CODE (arg1) == INTEGER_CST)
7028 arg10 = build_one_cst (type);
7029 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7030 the purpose of this canonicalization. */
7031 if (TREE_INT_CST_HIGH (arg1) == -1
7032 && negate_expr_p (arg1)
7033 && code == PLUS_EXPR)
7035 arg11 = negate_expr (arg1);
7036 code = MINUS_EXPR;
7038 else
7039 arg11 = arg1;
7041 else
7043 /* We cannot generate constant 1 for fract. */
7044 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7045 return NULL_TREE;
7046 arg10 = arg1;
7047 arg11 = build_one_cst (type);
7049 same = NULL_TREE;
7051 if (operand_equal_p (arg01, arg11, 0))
7052 same = arg01, alt0 = arg00, alt1 = arg10;
7053 else if (operand_equal_p (arg00, arg10, 0))
7054 same = arg00, alt0 = arg01, alt1 = arg11;
7055 else if (operand_equal_p (arg00, arg11, 0))
7056 same = arg00, alt0 = arg01, alt1 = arg10;
7057 else if (operand_equal_p (arg01, arg10, 0))
7058 same = arg01, alt0 = arg00, alt1 = arg11;
7060 /* No identical multiplicands; see if we can find a common
7061 power-of-two factor in non-power-of-two multiplies. This
7062 can help in multi-dimensional array access. */
7063 else if (host_integerp (arg01, 0)
7064 && host_integerp (arg11, 0))
7066 HOST_WIDE_INT int01, int11, tmp;
7067 bool swap = false;
7068 tree maybe_same;
7069 int01 = TREE_INT_CST_LOW (arg01);
7070 int11 = TREE_INT_CST_LOW (arg11);
7072 /* Move min of absolute values to int11. */
7073 if ((int01 >= 0 ? int01 : -int01)
7074 < (int11 >= 0 ? int11 : -int11))
7076 tmp = int01, int01 = int11, int11 = tmp;
7077 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7078 maybe_same = arg01;
7079 swap = true;
7081 else
7082 maybe_same = arg11;
7084 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7085 /* The remainder should not be a constant, otherwise we
7086 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7087 increase the number of multiplications necessary. */
7088 && TREE_CODE (arg10) != INTEGER_CST)
7090 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7091 build_int_cst (TREE_TYPE (arg00),
7092 int01 / int11));
7093 alt1 = arg10;
7094 same = maybe_same;
7095 if (swap)
7096 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7100 if (same)
7101 return fold_build2_loc (loc, MULT_EXPR, type,
7102 fold_build2_loc (loc, code, type,
7103 fold_convert_loc (loc, type, alt0),
7104 fold_convert_loc (loc, type, alt1)),
7105 fold_convert_loc (loc, type, same));
7107 return NULL_TREE;
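/* Examples of the two paths above:

     A * 3 + B * 3   ->  (A + B) * 3        (identical multiplicands)
     I * 12 + J * 4  ->  (I * 3 + J) * 4    (common power-of-two 4)  */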
7110 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7111 specified by EXPR into the buffer PTR of length LEN bytes.
7112 Return the number of bytes placed in the buffer, or zero
7113 upon failure. */
7115 static int
7116 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7118 tree type = TREE_TYPE (expr);
7119 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7120 int byte, offset, word, words;
7121 unsigned char value;
7123 if (total_bytes > len)
7124 return 0;
7125 words = total_bytes / UNITS_PER_WORD;
7127 for (byte = 0; byte < total_bytes; byte++)
7129 int bitpos = byte * BITS_PER_UNIT;
7130 if (bitpos < HOST_BITS_PER_WIDE_INT)
7131 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7132 else
7133 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7134 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7136 if (total_bytes > UNITS_PER_WORD)
7138 word = byte / UNITS_PER_WORD;
7139 if (WORDS_BIG_ENDIAN)
7140 word = (words - 1) - word;
7141 offset = word * UNITS_PER_WORD;
7142 if (BYTES_BIG_ENDIAN)
7143 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7144 else
7145 offset += byte % UNITS_PER_WORD;
7147 else
7148 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7149 ptr[offset] = value;
7151 return total_bytes;
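/* As a concrete sketch: on a little-endian target with 32-bit int
   and UNITS_PER_WORD >= 4, encoding the INTEGER_CST 0x01020304
   stores the bytes 04 03 02 01 into PTR and returns 4.  */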
7155 /* Subroutine of native_encode_expr. Encode the REAL_CST
7156 specified by EXPR into the buffer PTR of length LEN bytes.
7157 Return the number of bytes placed in the buffer, or zero
7158 upon failure. */
7160 static int
7161 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7163 tree type = TREE_TYPE (expr);
7164 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7165 int byte, offset, word, words, bitpos;
7166 unsigned char value;
7168 /* There are always 32 bits in each long, no matter the size of
7169 the host's long. We handle floating point representations with
7170 up to 192 bits. */
7171 long tmp[6];
7173 if (total_bytes > len)
7174 return 0;
7175 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7177 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7179 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7180 bitpos += BITS_PER_UNIT)
7182 byte = (bitpos / BITS_PER_UNIT) & 3;
7183 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7185 if (UNITS_PER_WORD < 4)
7187 word = byte / UNITS_PER_WORD;
7188 if (WORDS_BIG_ENDIAN)
7189 word = (words - 1) - word;
7190 offset = word * UNITS_PER_WORD;
7191 if (BYTES_BIG_ENDIAN)
7192 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7193 else
7194 offset += byte % UNITS_PER_WORD;
7196 else
7197 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7198 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7200 return total_bytes;
7203 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7204 specified by EXPR into the buffer PTR of length LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero
7206 upon failure. */
7208 static int
7209 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7211 int rsize, isize;
7212 tree part;
7214 part = TREE_REALPART (expr);
7215 rsize = native_encode_expr (part, ptr, len);
7216 if (rsize == 0)
7217 return 0;
7218 part = TREE_IMAGPART (expr);
7219 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7220 if (isize != rsize)
7221 return 0;
7222 return rsize + isize;
7226 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7227 specified by EXPR into the buffer PTR of length LEN bytes.
7228 Return the number of bytes placed in the buffer, or zero
7229 upon failure. */
7231 static int
7232 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7234 int i, size, offset, count;
7235 tree itype, elem, elements;
7237 offset = 0;
7238 elements = TREE_VECTOR_CST_ELTS (expr);
7239 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7240 itype = TREE_TYPE (TREE_TYPE (expr));
7241 size = GET_MODE_SIZE (TYPE_MODE (itype));
7242 for (i = 0; i < count; i++)
7244 if (elements)
7246 elem = TREE_VALUE (elements);
7247 elements = TREE_CHAIN (elements);
7249 else
7250 elem = NULL_TREE;
7252 if (elem)
7254 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7255 return 0;
7257 else
7259 if (offset + size > len)
7260 return 0;
7261 memset (ptr+offset, 0, size);
7263 offset += size;
7265 return offset;
7269 /* Subroutine of native_encode_expr. Encode the STRING_CST
7270 specified by EXPR into the buffer PTR of length LEN bytes.
7271 Return the number of bytes placed in the buffer, or zero
7272 upon failure. */
7274 static int
7275 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7277 tree type = TREE_TYPE (expr);
7278 HOST_WIDE_INT total_bytes;
7280 if (TREE_CODE (type) != ARRAY_TYPE
7281 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7282 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7283 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7284 return 0;
7285 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7286 if (total_bytes > len)
7287 return 0;
7288 if (TREE_STRING_LENGTH (expr) < total_bytes)
7290 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7291 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7292 total_bytes - TREE_STRING_LENGTH (expr));
7294 else
7295 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7296 return total_bytes;
7300 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7301 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7302 buffer PTR of length LEN bytes. Return the number of bytes
7303 placed in the buffer, or zero upon failure. */
7305 int
7306 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7308 switch (TREE_CODE (expr))
7310 case INTEGER_CST:
7311 return native_encode_int (expr, ptr, len);
7313 case REAL_CST:
7314 return native_encode_real (expr, ptr, len);
7316 case COMPLEX_CST:
7317 return native_encode_complex (expr, ptr, len);
7319 case VECTOR_CST:
7320 return native_encode_vector (expr, ptr, len);
7322 case STRING_CST:
7323 return native_encode_string (expr, ptr, len);
7325 default:
7326 return 0;
7331 /* Subroutine of native_interpret_expr. Interpret the contents of
7332 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7333 If the buffer cannot be interpreted, return NULL_TREE. */
7335 static tree
7336 native_interpret_int (tree type, const unsigned char *ptr, int len)
7338 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7339 int byte, offset, word, words;
7340 unsigned char value;
7341 unsigned HOST_WIDE_INT lo = 0;
7342 HOST_WIDE_INT hi = 0;
7344 if (total_bytes > len)
7345 return NULL_TREE;
7346 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7347 return NULL_TREE;
7348 words = total_bytes / UNITS_PER_WORD;
7350 for (byte = 0; byte < total_bytes; byte++)
7352 int bitpos = byte * BITS_PER_UNIT;
7353 if (total_bytes > UNITS_PER_WORD)
7355 word = byte / UNITS_PER_WORD;
7356 if (WORDS_BIG_ENDIAN)
7357 word = (words - 1) - word;
7358 offset = word * UNITS_PER_WORD;
7359 if (BYTES_BIG_ENDIAN)
7360 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7361 else
7362 offset += byte % UNITS_PER_WORD;
7364 else
7365 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7366 value = ptr[offset];
7368 if (bitpos < HOST_BITS_PER_WIDE_INT)
7369 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7370 else
7371 hi |= (unsigned HOST_WIDE_INT) value
7372 << (bitpos - HOST_BITS_PER_WIDE_INT);
7375 return build_int_cst_wide_type (type, lo, hi);
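/* Added illustration (editor's note; assumes a little-endian
   target, 4-byte ints and HOST_BITS_PER_WIDE_INT >= 32):
   interpreting the buffer { 0x78, 0x56, 0x34, 0x12 } accumulates

     lo = 0x78 | 0x56 << 8 | 0x34 << 16 | 0x12 << 24 = 0x12345678

   with hi left at zero, yielding the INTEGER_CST 0x12345678.  */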
7379 /* Subroutine of native_interpret_expr. Interpret the contents of
7380 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7381 If the buffer cannot be interpreted, return NULL_TREE. */
7383 static tree
7384 native_interpret_real (tree type, const unsigned char *ptr, int len)
7386 enum machine_mode mode = TYPE_MODE (type);
7387 int total_bytes = GET_MODE_SIZE (mode);
7388 int byte, offset, word, words, bitpos;
7389 unsigned char value;
7390 /* There are always 32 bits in each long, no matter the size of
7391 the host's long. We handle floating-point representations with
7392 up to 192 bits. */
7393 REAL_VALUE_TYPE r;
7394 long tmp[6];
7397 if (total_bytes > len || total_bytes > 24)
7398 return NULL_TREE;
7399 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7401 memset (tmp, 0, sizeof (tmp));
7402 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7403 bitpos += BITS_PER_UNIT)
7405 byte = (bitpos / BITS_PER_UNIT) & 3;
7406 if (UNITS_PER_WORD < 4)
7408 word = byte / UNITS_PER_WORD;
7409 if (WORDS_BIG_ENDIAN)
7410 word = (words - 1) - word;
7411 offset = word * UNITS_PER_WORD;
7412 if (BYTES_BIG_ENDIAN)
7413 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7414 else
7415 offset += byte % UNITS_PER_WORD;
7417 else
7418 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7419 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7421 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7424 real_from_target (&r, tmp, mode);
7425 return build_real (type, r);
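/* Editor's note (not part of the original source): the loop above
   is the exact inverse of native_encode_real; bytes are gathered
   into 32-bit chunks of TMP regardless of the host's long size,
   and real_from_target decodes the target image, so a
   native_encode_expr / native_interpret_real round trip over a
   REAL_CST is expected to reproduce the value bit-for-bit.  */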
7429 /* Subroutine of native_interpret_expr. Interpret the contents of
7430 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7431 If the buffer cannot be interpreted, return NULL_TREE. */
7433 static tree
7434 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7436 tree etype, rpart, ipart;
7437 int size;
7439 etype = TREE_TYPE (type);
7440 size = GET_MODE_SIZE (TYPE_MODE (etype));
7441 if (size * 2 > len)
7442 return NULL_TREE;
7443 rpart = native_interpret_expr (etype, ptr, size);
7444 if (!rpart)
7445 return NULL_TREE;
7446 ipart = native_interpret_expr (etype, ptr+size, size);
7447 if (!ipart)
7448 return NULL_TREE;
7449 return build_complex (type, rpart, ipart);
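/* Added illustration (editor's note; assumes _Complex double with
   8-byte parts): a 16-byte buffer is split in half, ptr[0..7] is
   interpreted as the real part and ptr[8..15] as the imaginary
   part, mirroring the layout written by native_encode_complex.  */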
7453 /* Subroutine of native_interpret_expr. Interpret the contents of
7454 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7455 If the buffer cannot be interpreted, return NULL_TREE. */
7457 static tree
7458 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7460 tree etype, elem, elements;
7461 int i, size, count;
7463 etype = TREE_TYPE (type);
7464 size = GET_MODE_SIZE (TYPE_MODE (etype));
7465 count = TYPE_VECTOR_SUBPARTS (type);
7466 if (size * count > len)
7467 return NULL_TREE;
7469 elements = NULL_TREE;
7470 for (i = count - 1; i >= 0; i--)
7472 elem = native_interpret_expr (etype, ptr+(i*size), size);
7473 if (!elem)
7474 return NULL_TREE;
7475 elements = tree_cons (NULL_TREE, elem, elements);
7477 return build_vector (type, elements);
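/* Editor's note (not part of the original source): the loop walks
   the elements backwards because tree_cons prepends to the chain;
   interpreting element COUNT-1 first therefore leaves the list in
   ascending element order for build_vector.  */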
7481 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7482 the buffer PTR of length LEN as a constant of type TYPE. For
7483 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7484 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7485 return NULL_TREE. */
7487 tree
7488 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7490 switch (TREE_CODE (type))
7492 case INTEGER_TYPE:
7493 case ENUMERAL_TYPE:
7494 case BOOLEAN_TYPE:
7495 return native_interpret_int (type, ptr, len);
7497 case REAL_TYPE:
7498 return native_interpret_real (type, ptr, len);
7500 case COMPLEX_TYPE:
7501 return native_interpret_complex (type, ptr, len);
7503 case VECTOR_TYPE:
7504 return native_interpret_vector (type, ptr, len);
7506 default:
7507 return NULL_TREE;
7512 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7513 TYPE at compile-time. If we're unable to perform the conversion
7514 return NULL_TREE. */
7516 static tree
7517 fold_view_convert_expr (tree type, tree expr)
7519 /* We support up to 512-bit values (for V8DFmode). */
7520 unsigned char buffer[64];
7521 int len;
7523 /* Check that the host and target are sane. */
7524 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7525 return NULL_TREE;
7527 len = native_encode_expr (expr, buffer, sizeof (buffer));
7528 if (len == 0)
7529 return NULL_TREE;
7531 return native_interpret_expr (type, buffer, len);
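/* Added usage sketch (editor's note; FLOAT_CST below is a
   hypothetical REAL_CST holding 1.0f):

     unsigned char buf[64];
     int n = native_encode_expr (float_cst, buf, sizeof buf);
     tree t = native_interpret_expr (integer_type_node, buf, n);

   Assuming float is a 4-byte IEEE single, N is 4 and T is the
   INTEGER_CST 0x3f800000, which is what folding
   VIEW_CONVERT_EXPR<int>(1.0f) must produce.  */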
7534 /* Build an expression for the address of T. Folds away INDIRECT_REF
7535 to avoid confusing the gimplify process. */
7537 tree
7538 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7540 /* The size of the object is not relevant when talking about its address. */
7541 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7542 t = TREE_OPERAND (t, 0);
7544 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7545 if (TREE_CODE (t) == INDIRECT_REF
7546 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7548 t = TREE_OPERAND (t, 0);
7550 if (TREE_TYPE (t) != ptrtype)
7552 t = build1 (NOP_EXPR, ptrtype, t);
7553 SET_EXPR_LOCATION (t, loc);
7556 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7558 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7560 if (TREE_TYPE (t) != ptrtype)
7561 t = fold_convert_loc (loc, ptrtype, t);
7563 else
7565 t = build1 (ADDR_EXPR, ptrtype, t);
7566 SET_EXPR_LOCATION (t, loc);
7569 return t;
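/* Added illustration (editor's note): building &*p for an
   INDIRECT_REF folds back to P itself, with a NOP_EXPR cast added
   when the requested pointer type differs; this keeps the
   gimplifier from seeing a pointless address-of/dereference
   pair.  */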
7572 /* Build an expression for the address of T. */
7574 tree
7575 build_fold_addr_expr_loc (location_t loc, tree t)
7577 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7579 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7582 /* Fold a unary expression of code CODE and type TYPE with operand
7583 OP0. Return the folded expression if folding is successful.
7584 Otherwise, return NULL_TREE. */
7586 tree
7587 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7589 tree tem;
7590 tree arg0;
7591 enum tree_code_class kind = TREE_CODE_CLASS (code);
7593 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7594 && TREE_CODE_LENGTH (code) == 1);
7596 arg0 = op0;
7597 if (arg0)
7599 if (CONVERT_EXPR_CODE_P (code)
7600 || code == FLOAT_EXPR || code == ABS_EXPR)
7602 /* Don't use STRIP_NOPS, because signedness of argument type
7603 matters. */
7604 STRIP_SIGN_NOPS (arg0);
7606 else
7608 /* Strip any conversions that don't change the mode. This
7609 is safe for every expression, except for a comparison
7610 expression because its signedness is derived from its
7611 operands.
7613 Note that this is done as an internal manipulation within
7614 the constant folder, in order to find the simplest
7615 representation of the arguments so that their form can be
7616 studied. In any case, the appropriate type conversions
7617 should be put back in the tree that will get out of the
7618 constant folder. */
7619 STRIP_NOPS (arg0);
7623 if (TREE_CODE_CLASS (code) == tcc_unary)
7625 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7626 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7627 fold_build1_loc (loc, code, type,
7628 fold_convert_loc (loc, TREE_TYPE (op0),
7629 TREE_OPERAND (arg0, 1))));
7630 else if (TREE_CODE (arg0) == COND_EXPR)
7632 tree arg01 = TREE_OPERAND (arg0, 1);
7633 tree arg02 = TREE_OPERAND (arg0, 2);
7634 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7635 arg01 = fold_build1_loc (loc, code, type,
7636 fold_convert_loc (loc,
7637 TREE_TYPE (op0), arg01));
7638 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7639 arg02 = fold_build1_loc (loc, code, type,
7640 fold_convert_loc (loc,
7641 TREE_TYPE (op0), arg02));
7642 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7643 arg01, arg02);
7645 /* If this was a conversion, and all we did was to move into
7646 inside the COND_EXPR, bring it back out. But leave it if
7647 it is a conversion from integer to integer and the
7648 result precision is no wider than a word since such a
7649 conversion is cheap and may be optimized away by combine,
7650 while it couldn't if it were outside the COND_EXPR. Then return
7651 so we don't get into an infinite recursion loop taking the
7652 conversion out and then back in. */
7654 if ((CONVERT_EXPR_CODE_P (code)
7655 || code == NON_LVALUE_EXPR)
7656 && TREE_CODE (tem) == COND_EXPR
7657 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7658 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7659 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7660 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7661 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7662 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7663 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7664 && (INTEGRAL_TYPE_P
7665 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7666 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7667 || flag_syntax_only))
7669 tem = build1 (code, type,
7670 build3 (COND_EXPR,
7671 TREE_TYPE (TREE_OPERAND
7672 (TREE_OPERAND (tem, 1), 0)),
7673 TREE_OPERAND (tem, 0),
7674 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7675 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7676 SET_EXPR_LOCATION (tem, loc);
7678 return tem;
7680 else if (COMPARISON_CLASS_P (arg0))
7682 if (TREE_CODE (type) == BOOLEAN_TYPE)
7684 arg0 = copy_node (arg0);
7685 TREE_TYPE (arg0) = type;
7686 return arg0;
7688 else if (TREE_CODE (type) != INTEGER_TYPE)
7689 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7690 fold_build1_loc (loc, code, type,
7691 integer_one_node),
7692 fold_build1_loc (loc, code, type,
7693 integer_zero_node));
7697 switch (code)
7699 case PAREN_EXPR:
7700 /* Re-association barriers around constants and other re-association
7701 barriers can be removed. */
7702 if (CONSTANT_CLASS_P (op0)
7703 || TREE_CODE (op0) == PAREN_EXPR)
7704 return fold_convert_loc (loc, type, op0);
7705 return NULL_TREE;
7707 CASE_CONVERT:
7708 case FLOAT_EXPR:
7709 case FIX_TRUNC_EXPR:
7710 if (TREE_TYPE (op0) == type)
7711 return op0;
7713 /* If we have (type) (a CMP b) and type is an integral type, return
7714 new expression involving the new type. */
7715 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7716 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7717 TREE_OPERAND (op0, 1));
7719 /* Handle cases of two conversions in a row. */
7720 if (CONVERT_EXPR_P (op0))
7722 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7723 tree inter_type = TREE_TYPE (op0);
7724 int inside_int = INTEGRAL_TYPE_P (inside_type);
7725 int inside_ptr = POINTER_TYPE_P (inside_type);
7726 int inside_float = FLOAT_TYPE_P (inside_type);
7727 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7728 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7729 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7730 int inter_int = INTEGRAL_TYPE_P (inter_type);
7731 int inter_ptr = POINTER_TYPE_P (inter_type);
7732 int inter_float = FLOAT_TYPE_P (inter_type);
7733 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7734 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7735 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7736 int final_int = INTEGRAL_TYPE_P (type);
7737 int final_ptr = POINTER_TYPE_P (type);
7738 int final_float = FLOAT_TYPE_P (type);
7739 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7740 unsigned int final_prec = TYPE_PRECISION (type);
7741 int final_unsignedp = TYPE_UNSIGNED (type);
7743 /* In addition to the cases of two conversions in a row
7744 handled below, if we are converting something to its own
7745 type via an object of identical or wider precision, neither
7746 conversion is needed. */
7747 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7748 && (((inter_int || inter_ptr) && final_int)
7749 || (inter_float && final_float))
7750 && inter_prec >= final_prec)
7751 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7753 /* Likewise, if the intermediate and initial types are either both
7754 float or both integer, we don't need the middle conversion if the
7755 former is wider than the latter and doesn't change the signedness
7756 (for integers). Avoid this if the final type is a pointer since
7757 then we sometimes need the middle conversion. Likewise if the
7758 final type has a precision not equal to the size of its mode. */
7759 if (((inter_int && inside_int)
7760 || (inter_float && inside_float)
7761 || (inter_vec && inside_vec))
7762 && inter_prec >= inside_prec
7763 && (inter_float || inter_vec
7764 || inter_unsignedp == inside_unsignedp)
7765 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7766 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7767 && ! final_ptr
7768 && (! final_vec || inter_prec == inside_prec))
7769 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7771 /* If we have a sign-extension of a zero-extended value, we can
7772 replace that by a single zero-extension. */
7773 if (inside_int && inter_int && final_int
7774 && inside_prec < inter_prec && inter_prec < final_prec
7775 && inside_unsignedp && !inter_unsignedp)
7776 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7778 /* Two conversions in a row are not needed unless:
7779 - some conversion is floating-point (overstrict for now), or
7780 - some conversion is a vector (overstrict for now), or
7781 - the intermediate type is narrower than both initial and
7782 final, or
7783 - the intermediate type and innermost type differ in signedness,
7784 and the outermost type is wider than the intermediate, or
7785 - the initial type is a pointer type and the precisions of the
7786 intermediate and final types differ, or
7787 - the final type is a pointer type and the precisions of the
7788 initial and intermediate types differ. */
7789 if (! inside_float && ! inter_float && ! final_float
7790 && ! inside_vec && ! inter_vec && ! final_vec
7791 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7792 && ! (inside_int && inter_int
7793 && inter_unsignedp != inside_unsignedp
7794 && inter_prec < final_prec)
7795 && ((inter_unsignedp && inter_prec > inside_prec)
7796 == (final_unsignedp && final_prec > inter_prec))
7797 && ! (inside_ptr && inter_prec != final_prec)
7798 && ! (final_ptr && inside_prec != inter_prec)
7799 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7800 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7801 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
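/* Added illustration (editor's note; assumes 32-bit int and 64-bit
   long): by the rules above, (int)(long)i for an int I folds to I,
   since the intermediate widening is useless, whereas
   (int)(short)i must keep the narrowing because the intermediate
   type is narrower than both the initial and the final type.  */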
7804 /* Handle (T *)&A.B.C for A being of type T and B and C
7805 living at offset zero. This occurs frequently in
7806 C++ upcasting and then accessing the base. */
7807 if (TREE_CODE (op0) == ADDR_EXPR
7808 && POINTER_TYPE_P (type)
7809 && handled_component_p (TREE_OPERAND (op0, 0)))
7811 HOST_WIDE_INT bitsize, bitpos;
7812 tree offset;
7813 enum machine_mode mode;
7814 int unsignedp, volatilep;
7815 tree base = TREE_OPERAND (op0, 0);
7816 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7817 &mode, &unsignedp, &volatilep, false);
7818 /* If the reference was to a (constant) zero offset, we can use
7819 the address of the base if it has the same base type
7820 as the result type and the pointer type is unqualified. */
7821 if (! offset && bitpos == 0
7822 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7823 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7824 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7825 return fold_convert_loc (loc, type,
7826 build_fold_addr_expr_loc (loc, base));
7829 if (TREE_CODE (op0) == MODIFY_EXPR
7830 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7831 /* Detect assigning a bitfield. */
7832 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7833 && DECL_BIT_FIELD
7834 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7836 /* Don't leave an assignment inside a conversion
7837 unless assigning a bitfield. */
7838 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7839 /* First do the assignment, then return converted constant. */
7840 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7841 TREE_NO_WARNING (tem) = 1;
7842 TREE_USED (tem) = 1;
7843 SET_EXPR_LOCATION (tem, loc);
7844 return tem;
7847 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7848 constant (if x has signed type, the sign bit cannot be set
7849 in c). This folds extension into the BIT_AND_EXPR.
7850 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7851 very likely don't have maximal range for their precision and this
7852 transformation effectively doesn't preserve non-maximal ranges. */
7853 if (TREE_CODE (type) == INTEGER_TYPE
7854 && TREE_CODE (op0) == BIT_AND_EXPR
7855 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7857 tree and_expr = op0;
7858 tree and0 = TREE_OPERAND (and_expr, 0);
7859 tree and1 = TREE_OPERAND (and_expr, 1);
7860 int change = 0;
7862 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7863 || (TYPE_PRECISION (type)
7864 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7865 change = 1;
7866 else if (TYPE_PRECISION (TREE_TYPE (and1))
7867 <= HOST_BITS_PER_WIDE_INT
7868 && host_integerp (and1, 1))
7870 unsigned HOST_WIDE_INT cst;
7872 cst = tree_low_cst (and1, 1);
7873 cst &= (HOST_WIDE_INT) -1
7874 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7875 change = (cst == 0);
7876 #ifdef LOAD_EXTEND_OP
7877 if (change
7878 && !flag_syntax_only
7879 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7880 == ZERO_EXTEND))
7882 tree uns = unsigned_type_for (TREE_TYPE (and0));
7883 and0 = fold_convert_loc (loc, uns, and0);
7884 and1 = fold_convert_loc (loc, uns, and1);
7886 #endif
7888 if (change)
7890 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7891 TREE_INT_CST_HIGH (and1), 0,
7892 TREE_OVERFLOW (and1));
7893 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7894 fold_convert_loc (loc, type, and0), tem);
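/* Added illustration (editor's note; assumes 32-bit unsigned int
   and 64-bit unsigned long): (unsigned long)(x & 0xff) for an
   unsigned int X becomes (unsigned long)x & 0xffUL, moving the
   widening inside the mask; this is safe because the mask already
   clears every bit on which the two extensions could differ.  */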
7898 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7899 when one of the new casts will fold away. Conservatively we assume
7900 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7901 if (POINTER_TYPE_P (type)
7902 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7903 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7904 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7905 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7907 tree arg00 = TREE_OPERAND (arg0, 0);
7908 tree arg01 = TREE_OPERAND (arg0, 1);
7910 return fold_build2_loc (loc,
7911 TREE_CODE (arg0), type,
7912 fold_convert_loc (loc, type, arg00),
7913 fold_convert_loc (loc, sizetype, arg01));
7916 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7917 of the same precision, and X is of an integer type not narrower
7918 than T1 or T2, i.e. the cast (T2)X isn't an extension. */
7919 if (INTEGRAL_TYPE_P (type)
7920 && TREE_CODE (op0) == BIT_NOT_EXPR
7921 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7922 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7923 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7925 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7926 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7927 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7928 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7929 fold_convert_loc (loc, type, tem));
7932 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7933 type of X and Y (integer types only). */
7934 if (INTEGRAL_TYPE_P (type)
7935 && TREE_CODE (op0) == MULT_EXPR
7936 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7937 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7939 /* Be careful not to introduce new overflows. */
7940 tree mult_type;
7941 if (TYPE_OVERFLOW_WRAPS (type))
7942 mult_type = type;
7943 else
7944 mult_type = unsigned_type_for (type);
7946 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7948 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7949 fold_convert_loc (loc, mult_type,
7950 TREE_OPERAND (op0, 0)),
7951 fold_convert_loc (loc, mult_type,
7952 TREE_OPERAND (op0, 1)));
7953 return fold_convert_loc (loc, type, tem);
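/* Added illustration (editor's note; assumes 32-bit int, 64-bit
   long and non-wrapping int overflow): the rule above folds
   (int)((long)x * (long)y) into a multiplication done directly in
   unsigned int, converted back to int, avoiding the widening while
   introducing no new overflow.  */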
7957 tem = fold_convert_const (code, type, op0);
7958 return tem ? tem : NULL_TREE;
7960 case ADDR_SPACE_CONVERT_EXPR:
7961 if (integer_zerop (arg0))
7962 return fold_convert_const (code, type, arg0);
7963 return NULL_TREE;
7965 case FIXED_CONVERT_EXPR:
7966 tem = fold_convert_const (code, type, arg0);
7967 return tem ? tem : NULL_TREE;
7969 case VIEW_CONVERT_EXPR:
7970 if (TREE_TYPE (op0) == type)
7971 return op0;
7972 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7973 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7974 type, TREE_OPERAND (op0, 0));
7976 /* For integral conversions with the same precision, or for pointer
7977 conversions, use a NOP_EXPR instead. */
7978 if ((INTEGRAL_TYPE_P (type)
7979 || POINTER_TYPE_P (type))
7980 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7981 || POINTER_TYPE_P (TREE_TYPE (op0)))
7982 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7983 return fold_convert_loc (loc, type, op0);
7985 /* Strip inner integral conversions that do not change the precision. */
7986 if (CONVERT_EXPR_P (op0)
7987 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7988 || POINTER_TYPE_P (TREE_TYPE (op0)))
7989 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7990 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7991 && (TYPE_PRECISION (TREE_TYPE (op0))
7992 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7993 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7994 type, TREE_OPERAND (op0, 0));
7996 return fold_view_convert_expr (type, op0);
7998 case NEGATE_EXPR:
7999 tem = fold_negate_expr (loc, arg0);
8000 if (tem)
8001 return fold_convert_loc (loc, type, tem);
8002 return NULL_TREE;
8004 case ABS_EXPR:
8005 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8006 return fold_abs_const (arg0, type);
8007 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8008 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8009 /* Convert fabs((double)float) into (double)fabsf(float). */
8010 else if (TREE_CODE (arg0) == NOP_EXPR
8011 && TREE_CODE (type) == REAL_TYPE)
8013 tree targ0 = strip_float_extensions (arg0);
8014 if (targ0 != arg0)
8015 return fold_convert_loc (loc, type,
8016 fold_build1_loc (loc, ABS_EXPR,
8017 TREE_TYPE (targ0),
8018 targ0));
8020 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8021 else if (TREE_CODE (arg0) == ABS_EXPR)
8022 return arg0;
8023 else if (tree_expr_nonnegative_p (arg0))
8024 return arg0;
8026 /* Strip sign ops from argument. */
8027 if (TREE_CODE (type) == REAL_TYPE)
8029 tem = fold_strip_sign_ops (arg0);
8030 if (tem)
8031 return fold_build1_loc (loc, ABS_EXPR, type,
8032 fold_convert_loc (loc, type, tem));
8034 return NULL_TREE;
8036 case CONJ_EXPR:
8037 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8038 return fold_convert_loc (loc, type, arg0);
8039 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8041 tree itype = TREE_TYPE (type);
8042 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8043 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8044 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8045 negate_expr (ipart));
8047 if (TREE_CODE (arg0) == COMPLEX_CST)
8049 tree itype = TREE_TYPE (type);
8050 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8051 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8052 return build_complex (type, rpart, negate_expr (ipart));
8054 if (TREE_CODE (arg0) == CONJ_EXPR)
8055 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8056 return NULL_TREE;
8058 case BIT_NOT_EXPR:
8059 if (TREE_CODE (arg0) == INTEGER_CST)
8060 return fold_not_const (arg0, type);
8061 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8062 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8063 /* Convert ~ (-A) to A - 1. */
8064 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8065 return fold_build2_loc (loc, MINUS_EXPR, type,
8066 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8067 build_int_cst (type, 1));
8068 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8069 else if (INTEGRAL_TYPE_P (type)
8070 && ((TREE_CODE (arg0) == MINUS_EXPR
8071 && integer_onep (TREE_OPERAND (arg0, 1)))
8072 || (TREE_CODE (arg0) == PLUS_EXPR
8073 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8074 return fold_build1_loc (loc, NEGATE_EXPR, type,
8075 fold_convert_loc (loc, type,
8076 TREE_OPERAND (arg0, 0)));
8077 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8078 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8079 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8080 fold_convert_loc (loc, type,
8081 TREE_OPERAND (arg0, 0)))))
8082 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8083 fold_convert_loc (loc, type,
8084 TREE_OPERAND (arg0, 1)));
8085 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8086 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8087 fold_convert_loc (loc, type,
8088 TREE_OPERAND (arg0, 1)))))
8089 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8090 fold_convert_loc (loc, type,
8091 TREE_OPERAND (arg0, 0)), tem);
8092 /* Perform BIT_NOT_EXPR on each element individually. */
8093 else if (TREE_CODE (arg0) == VECTOR_CST)
8095 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8096 int count = TYPE_VECTOR_SUBPARTS (type), i;
8098 for (i = 0; i < count; i++)
8100 if (elements)
8102 elem = TREE_VALUE (elements);
8103 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8104 if (elem == NULL_TREE)
8105 break;
8106 elements = TREE_CHAIN (elements);
8108 else
8109 elem = build_int_cst (TREE_TYPE (type), -1);
8110 list = tree_cons (NULL_TREE, elem, list);
8112 if (i == count)
8113 return build_vector (type, nreverse (list));
8116 return NULL_TREE;
8118 case TRUTH_NOT_EXPR:
8119 /* The argument to invert_truthvalue must have Boolean type. */
8120 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8121 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8123 /* Note that the operand of this must be an int
8124 and its values must be 0 or 1.
8125 ("true" is a fixed value perhaps depending on the language,
8126 but we don't handle values other than 1 correctly yet.) */
8127 tem = fold_truth_not_expr (loc, arg0);
8128 if (!tem)
8129 return NULL_TREE;
8130 return fold_convert_loc (loc, type, tem);
8132 case REALPART_EXPR:
8133 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8134 return fold_convert_loc (loc, type, arg0);
8135 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8136 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8137 TREE_OPERAND (arg0, 1));
8138 if (TREE_CODE (arg0) == COMPLEX_CST)
8139 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8140 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8142 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8143 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8144 fold_build1_loc (loc, REALPART_EXPR, itype,
8145 TREE_OPERAND (arg0, 0)),
8146 fold_build1_loc (loc, REALPART_EXPR, itype,
8147 TREE_OPERAND (arg0, 1)));
8148 return fold_convert_loc (loc, type, tem);
8150 if (TREE_CODE (arg0) == CONJ_EXPR)
8152 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8153 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8154 TREE_OPERAND (arg0, 0));
8155 return fold_convert_loc (loc, type, tem);
8157 if (TREE_CODE (arg0) == CALL_EXPR)
8159 tree fn = get_callee_fndecl (arg0);
8160 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8161 switch (DECL_FUNCTION_CODE (fn))
8163 CASE_FLT_FN (BUILT_IN_CEXPI):
8164 fn = mathfn_built_in (type, BUILT_IN_COS);
8165 if (fn)
8166 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8167 break;
8169 default:
8170 break;
8173 return NULL_TREE;
8175 case IMAGPART_EXPR:
8176 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8177 return fold_convert_loc (loc, type, integer_zero_node);
8178 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8179 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8180 TREE_OPERAND (arg0, 0));
8181 if (TREE_CODE (arg0) == COMPLEX_CST)
8182 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8183 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8185 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8186 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8187 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8188 TREE_OPERAND (arg0, 0)),
8189 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8190 TREE_OPERAND (arg0, 1)));
8191 return fold_convert_loc (loc, type, tem);
8193 if (TREE_CODE (arg0) == CONJ_EXPR)
8195 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8196 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8197 return fold_convert_loc (loc, type, negate_expr (tem));
8199 if (TREE_CODE (arg0) == CALL_EXPR)
8201 tree fn = get_callee_fndecl (arg0);
8202 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8203 switch (DECL_FUNCTION_CODE (fn))
8205 CASE_FLT_FN (BUILT_IN_CEXPI):
8206 fn = mathfn_built_in (type, BUILT_IN_SIN);
8207 if (fn)
8208 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8209 break;
8211 default:
8212 break;
8215 return NULL_TREE;
8217 case INDIRECT_REF:
8218 /* Fold *&X to X if X is an lvalue. */
8219 if (TREE_CODE (op0) == ADDR_EXPR)
8221 tree op00 = TREE_OPERAND (op0, 0);
8222 if ((TREE_CODE (op00) == VAR_DECL
8223 || TREE_CODE (op00) == PARM_DECL
8224 || TREE_CODE (op00) == RESULT_DECL)
8225 && !TREE_READONLY (op00))
8226 return op00;
8228 return NULL_TREE;
8230 default:
8231 return NULL_TREE;
8232 } /* switch (code) */
8236 /* If the operation was a conversion do _not_ mark a resulting constant
8237 with TREE_OVERFLOW if the original constant was not. These conversions
8238 have implementation defined behavior and retaining the TREE_OVERFLOW
8239 flag here would confuse later passes such as VRP. */
8240 tree
8241 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8242 tree type, tree op0)
8244 tree res = fold_unary_loc (loc, code, type, op0);
8245 if (res
8246 && TREE_CODE (res) == INTEGER_CST
8247 && TREE_CODE (op0) == INTEGER_CST
8248 && CONVERT_EXPR_CODE_P (code))
8249 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8251 return res;
8254 /* Fold a binary expression of code CODE and type TYPE with operands
8255 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8256 Return the folded expression if folding is successful. Otherwise,
8257 return NULL_TREE. */
8259 static tree
8260 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8262 enum tree_code compl_code;
8264 if (code == MIN_EXPR)
8265 compl_code = MAX_EXPR;
8266 else if (code == MAX_EXPR)
8267 compl_code = MIN_EXPR;
8268 else
8269 gcc_unreachable ();
8271 /* MIN (MAX (a, b), b) == b. */
8272 if (TREE_CODE (op0) == compl_code
8273 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8274 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8276 /* MIN (MAX (b, a), b) == b. */
8277 if (TREE_CODE (op0) == compl_code
8278 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8279 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8280 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8282 /* MIN (a, MAX (a, b)) == a. */
8283 if (TREE_CODE (op1) == compl_code
8284 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8285 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8286 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8288 /* MIN (a, MAX (b, a)) == a. */
8289 if (TREE_CODE (op1) == compl_code
8290 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8291 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8292 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8294 return NULL_TREE;
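/* Added illustration (editor's note): with code == MIN_EXPR the
   complementary code is MAX_EXPR, so e.g. MIN (MAX (a, b), b)
   simplifies to B, while omit_one_operand_loc still evaluates the
   dropped operand A when it has side effects.  */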
8297 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8298 by changing CODE to reduce the magnitude of constants involved in
8299 ARG0 of the comparison.
8300 Returns a canonicalized comparison tree if a simplification was
8301 possible, otherwise returns NULL_TREE.
8302 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8303 valid if signed overflow is undefined. */
8305 static tree
8306 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8307 tree arg0, tree arg1,
8308 bool *strict_overflow_p)
8310 enum tree_code code0 = TREE_CODE (arg0);
8311 tree t, cst0 = NULL_TREE;
8312 int sgn0;
8313 bool swap = false;
8315 /* Match A +- CST code arg1 and CST code arg1. We can change the
8316 first form only if overflow is undefined. */
8317 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8318 /* In principle pointers also have undefined overflow behavior,
8319 but that causes problems elsewhere. */
8320 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8321 && (code0 == MINUS_EXPR
8322 || code0 == PLUS_EXPR)
8323 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8324 || code0 == INTEGER_CST))
8325 return NULL_TREE;
8327 /* Identify the constant in arg0 and its sign. */
8328 if (code0 == INTEGER_CST)
8329 cst0 = arg0;
8330 else
8331 cst0 = TREE_OPERAND (arg0, 1);
8332 sgn0 = tree_int_cst_sgn (cst0);
8334 /* Overflowed constants and zero will cause problems. */
8335 if (integer_zerop (cst0)
8336 || TREE_OVERFLOW (cst0))
8337 return NULL_TREE;
8339 /* See if we can reduce the magnitude of the constant in
8340 arg0 by changing the comparison code. */
8341 if (code0 == INTEGER_CST)
8343 /* CST <= arg1 -> CST-1 < arg1. */
8344 if (code == LE_EXPR && sgn0 == 1)
8345 code = LT_EXPR;
8346 /* -CST < arg1 -> -CST-1 <= arg1. */
8347 else if (code == LT_EXPR && sgn0 == -1)
8348 code = LE_EXPR;
8349 /* CST > arg1 -> CST-1 >= arg1. */
8350 else if (code == GT_EXPR && sgn0 == 1)
8351 code = GE_EXPR;
8352 /* -CST >= arg1 -> -CST-1 > arg1. */
8353 else if (code == GE_EXPR && sgn0 == -1)
8354 code = GT_EXPR;
8355 else
8356 return NULL_TREE;
8357 /* arg1 code' CST' might be more canonical. */
8358 swap = true;
8360 else
8362 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8363 if (code == LT_EXPR
8364 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8365 code = LE_EXPR;
8366 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8367 else if (code == GT_EXPR
8368 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8369 code = GE_EXPR;
8370 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8371 else if (code == LE_EXPR
8372 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8373 code = LT_EXPR;
8374 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8375 else if (code == GE_EXPR
8376 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8377 code = GT_EXPR;
8378 else
8379 return NULL_TREE;
8380 *strict_overflow_p = true;
8383 /* Now build the constant reduced in magnitude. But not if that
8384 would produce one outside of its type's range. */
8385 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8386 && ((sgn0 == 1
8387 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8388 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8389 || (sgn0 == -1
8390 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8391 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8392 /* We cannot swap the comparison here as that would cause us to
8393 endlessly recurse. */
8394 return NULL_TREE;
8396 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8397 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8398 if (code0 != INTEGER_CST)
8399 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8401 /* If swapping might yield a more canonical form, do so. */
8402 if (swap)
8403 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8404 else
8405 return fold_build2_loc (loc, code, type, t, arg1);
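/* Added illustration (editor's note; assumes signed int X with
   undefined overflow): X + 2 > Y is rewritten as X + 1 >= Y,
   shrinking the constant by one, and the constant-first form
   2 <= Y becomes 1 < Y and is then swapped into Y > 1; repeated
   application drives comparisons towards a canonical shape with
   the smallest constant magnitude.  */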
8408 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8409 overflow further. Try to decrease the magnitude of constants involved
8410 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8411 and put sole constants at the second argument position.
8412 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8414 static tree
8415 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8416 tree arg0, tree arg1)
8418 tree t;
8419 bool strict_overflow_p;
8420 const char * const warnmsg = G_("assuming signed overflow does not occur "
8421 "when reducing constant in comparison");
8423 /* Try canonicalization by simplifying arg0. */
8424 strict_overflow_p = false;
8425 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8426 &strict_overflow_p);
8427 if (t)
8429 if (strict_overflow_p)
8430 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8431 return t;
8434 /* Try canonicalization by simplifying arg1 using the swapped
8435 comparison. */
8436 code = swap_tree_comparison (code);
8437 strict_overflow_p = false;
8438 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8439 &strict_overflow_p);
8440 if (t && strict_overflow_p)
8441 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8442 return t;
8445 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8446 space. This is used to avoid issuing overflow warnings for
8447 expressions like &p->x which cannot wrap. */
8449 static bool
8450 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8452 unsigned HOST_WIDE_INT offset_low, total_low;
8453 HOST_WIDE_INT size, offset_high, total_high;
8455 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8456 return true;
8458 if (bitpos < 0)
8459 return true;
8461 if (offset == NULL_TREE)
8463 offset_low = 0;
8464 offset_high = 0;
8466 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8467 return true;
8468 else
8470 offset_low = TREE_INT_CST_LOW (offset);
8471 offset_high = TREE_INT_CST_HIGH (offset);
8474 if (add_double_with_sign (offset_low, offset_high,
8475 bitpos / BITS_PER_UNIT, 0,
8476 &total_low, &total_high,
8477 true))
8478 return true;
8480 if (total_high != 0)
8481 return true;
8483 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8484 if (size <= 0)
8485 return true;
8487 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8488 array. */
8489 if (TREE_CODE (base) == ADDR_EXPR)
8491 HOST_WIDE_INT base_size;
8493 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8494 if (base_size > 0 && size < base_size)
8495 size = base_size;
8498 return total_low > (unsigned HOST_WIDE_INT) size;
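/* Added illustration (editor's note): for &p->x with X at byte
   offset 4 inside a 16-byte structure, OFFSET + BITPOS/8 is 4,
   which does not exceed the object size, so the function returns
   false and the caller may fold without a wraparound warning; a
   negative BITPOS or an offset past the object size conservatively
   returns true.  */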
8501 /* Subroutine of fold_binary. This routine performs all of the
8502 transformations that are common to the equality/inequality
8503 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8504 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8505 fold_binary itself should use fold_binary instead. Fold a comparison with
8506 tree code CODE and type TYPE with operands OP0 and OP1. Return
8507 the folded comparison or NULL_TREE. */
8509 static tree
8510 fold_comparison (location_t loc, enum tree_code code, tree type,
8511 tree op0, tree op1)
8513 tree arg0, arg1, tem;
8515 arg0 = op0;
8516 arg1 = op1;
8518 STRIP_SIGN_NOPS (arg0);
8519 STRIP_SIGN_NOPS (arg1);
8521 tem = fold_relational_const (code, type, arg0, arg1);
8522 if (tem != NULL_TREE)
8523 return tem;
8525 /* If one arg is a real or integer constant, put it last. */
8526 if (tree_swap_operands_p (arg0, arg1, true))
8527 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8529 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8530 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8531 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8532 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8533 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8534 && (TREE_CODE (arg1) == INTEGER_CST
8535 && !TREE_OVERFLOW (arg1)))
8537 tree const1 = TREE_OPERAND (arg0, 1);
8538 tree const2 = arg1;
8539 tree variable = TREE_OPERAND (arg0, 0);
8540 tree lhs;
8541 int lhs_add;
8542 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8544 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8545 TREE_TYPE (arg1), const2, const1);
8547 /* If the constant operation overflowed this can be
8548 simplified as a comparison against INT_MAX/INT_MIN. */
8549 if (TREE_CODE (lhs) == INTEGER_CST
8550 && TREE_OVERFLOW (lhs))
8552 int const1_sgn = tree_int_cst_sgn (const1);
8553 enum tree_code code2 = code;
8555 /* Get the sign of the constant on the lhs if the
8556 operation were VARIABLE + CONST1. */
8557 if (TREE_CODE (arg0) == MINUS_EXPR)
8558 const1_sgn = -const1_sgn;
8560 /* The sign of the constant determines if we overflowed
8561 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8562 Canonicalize to the INT_MIN overflow by swapping the comparison
8563 if necessary. */
8564 if (const1_sgn == -1)
8565 code2 = swap_tree_comparison (code);
8567 /* We can now look at the canonicalized case
8568 VARIABLE + 1 CODE2 INT_MIN
8569 and decide on the result. */
8570 if (code2 == LT_EXPR
8571 || code2 == LE_EXPR
8572 || code2 == EQ_EXPR)
8573 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8574 else if (code2 == NE_EXPR
8575 || code2 == GE_EXPR
8576 || code2 == GT_EXPR)
8577 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8580 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8581 && (TREE_CODE (lhs) != INTEGER_CST
8582 || !TREE_OVERFLOW (lhs)))
8584 fold_overflow_warning (("assuming signed overflow does not occur "
8585 "when changing X +- C1 cmp C2 to "
8586 "X cmp C1 +- C2"),
8587 WARN_STRICT_OVERFLOW_COMPARISON);
8588 return fold_build2_loc (loc, code, type, variable, lhs);
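/* Added illustration (editor's note; assumes signed int X with
   undefined overflow): X + 10 < 30 is rewritten as X < 20. When
   C2 -+ C1 overflows instead, the comparison is proved constant:
   e.g. X + 1 <= INT_MIN cannot hold without overflow, so it folds
   to false outright.  */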
8592 /* For comparisons of pointers we can decompose it to a compile time
8593 comparison of the base objects and the offsets into the object.
8594 This requires at least one operand being an ADDR_EXPR or a
8595 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8596 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8597 && (TREE_CODE (arg0) == ADDR_EXPR
8598 || TREE_CODE (arg1) == ADDR_EXPR
8599 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8600 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8602 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8603 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8604 enum machine_mode mode;
8605 int volatilep, unsignedp;
8606 bool indirect_base0 = false, indirect_base1 = false;
8608 /* Get base and offset for the access. Strip ADDR_EXPR for
8609 get_inner_reference, but put it back by stripping INDIRECT_REF
8610 off the base object if possible. indirect_baseN will be true
8611 if baseN is not an address but refers to the object itself. */
8612 base0 = arg0;
8613 if (TREE_CODE (arg0) == ADDR_EXPR)
8615 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8616 &bitsize, &bitpos0, &offset0, &mode,
8617 &unsignedp, &volatilep, false);
8618 if (TREE_CODE (base0) == INDIRECT_REF)
8619 base0 = TREE_OPERAND (base0, 0);
8620 else
8621 indirect_base0 = true;
8623 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8625 base0 = TREE_OPERAND (arg0, 0);
8626 offset0 = TREE_OPERAND (arg0, 1);
8629 base1 = arg1;
8630 if (TREE_CODE (arg1) == ADDR_EXPR)
8632 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8633 &bitsize, &bitpos1, &offset1, &mode,
8634 &unsignedp, &volatilep, false);
8635 if (TREE_CODE (base1) == INDIRECT_REF)
8636 base1 = TREE_OPERAND (base1, 0);
8637 else
8638 indirect_base1 = true;
8640 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8642 base1 = TREE_OPERAND (arg1, 0);
8643 offset1 = TREE_OPERAND (arg1, 1);
8646 /* A local variable can never be pointed to by
8647 the default SSA name of an incoming parameter. */
8648 if ((TREE_CODE (arg0) == ADDR_EXPR
8649 && indirect_base0
8650 && TREE_CODE (base0) == VAR_DECL
8651 && auto_var_in_fn_p (base0, current_function_decl)
8652 && !indirect_base1
8653 && TREE_CODE (base1) == SSA_NAME
8654 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8655 && SSA_NAME_IS_DEFAULT_DEF (base1))
8656 || (TREE_CODE (arg1) == ADDR_EXPR
8657 && indirect_base1
8658 && TREE_CODE (base1) == VAR_DECL
8659 && auto_var_in_fn_p (base1, current_function_decl)
8660 && !indirect_base0
8661 && TREE_CODE (base0) == SSA_NAME
8662 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8663 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8665 if (code == NE_EXPR)
8666 return constant_boolean_node (1, type);
8667 else if (code == EQ_EXPR)
8668 return constant_boolean_node (0, type);
8670 /* If we have equivalent bases we might be able to simplify. */
8671 else if (indirect_base0 == indirect_base1
8672 && operand_equal_p (base0, base1, 0))
8674 /* We can fold this expression to a constant if the non-constant
8675 offset parts are equal. */
8676 if ((offset0 == offset1
8677 || (offset0 && offset1
8678 && operand_equal_p (offset0, offset1, 0)))
8679 && (code == EQ_EXPR
8680 || code == NE_EXPR
8681 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8684 if (code != EQ_EXPR
8685 && code != NE_EXPR
8686 && bitpos0 != bitpos1
8687 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8688 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8689 fold_overflow_warning (("assuming pointer wraparound does not "
8690 "occur when comparing P +- C1 with "
8691 "P +- C2"),
8692 WARN_STRICT_OVERFLOW_CONDITIONAL);
8694 switch (code)
8696 case EQ_EXPR:
8697 return constant_boolean_node (bitpos0 == bitpos1, type);
8698 case NE_EXPR:
8699 return constant_boolean_node (bitpos0 != bitpos1, type);
8700 case LT_EXPR:
8701 return constant_boolean_node (bitpos0 < bitpos1, type);
8702 case LE_EXPR:
8703 return constant_boolean_node (bitpos0 <= bitpos1, type);
8704 case GE_EXPR:
8705 return constant_boolean_node (bitpos0 >= bitpos1, type);
8706 case GT_EXPR:
8707 return constant_boolean_node (bitpos0 > bitpos1, type);
8708 default:;
8711 /* We can simplify the comparison to a comparison of the variable
8712 offset parts if the constant offset parts are equal.
8713 Be careful to use signed size type here because otherwise we
8714 mess with array offsets in the wrong way. This is possible
8715 because pointer arithmetic is restricted to remain within an
8716 object and overflow on pointer differences is undefined as of
8717 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8718 else if (bitpos0 == bitpos1
8719 && ((code == EQ_EXPR || code == NE_EXPR)
8720 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8722 /* By converting to signed size type we cover middle-end pointer
8723 arithmetic which operates on unsigned pointer types of size
8724 type size and ARRAY_REF offsets which are properly sign or
8725 zero extended from their type in case it is narrower than
8726 size type. */
8727 if (offset0 == NULL_TREE)
8728 offset0 = build_int_cst (ssizetype, 0);
8729 else
8730 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8731 if (offset1 == NULL_TREE)
8732 offset1 = build_int_cst (ssizetype, 0);
8733 else
8734 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8736 if (code != EQ_EXPR
8737 && code != NE_EXPR
8738 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8739 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8740 fold_overflow_warning (("assuming pointer wraparound does not "
8741 "occur when comparing P +- C1 with "
8742 "P +- C2"),
8743 WARN_STRICT_OVERFLOW_COMPARISON);
8745 return fold_build2_loc (loc, code, type, offset0, offset1);
8748 /* For non-equal bases we can simplify if they are addresses
8749 of local binding decls or constants. */
8750 else if (indirect_base0 && indirect_base1
8751 /* We know that !operand_equal_p (base0, base1, 0)
8752 because the if condition was false. But make
8753 sure two decls are not the same. */
8754 && base0 != base1
8755 && TREE_CODE (arg0) == ADDR_EXPR
8756 && TREE_CODE (arg1) == ADDR_EXPR
8757 && (((TREE_CODE (base0) == VAR_DECL
8758 || TREE_CODE (base0) == PARM_DECL)
8759 && (targetm.binds_local_p (base0)
8760 || CONSTANT_CLASS_P (base1)))
8761 || CONSTANT_CLASS_P (base0))
8762 && (((TREE_CODE (base1) == VAR_DECL
8763 || TREE_CODE (base1) == PARM_DECL)
8764 && (targetm.binds_local_p (base1)
8765 || CONSTANT_CLASS_P (base0)))
8766 || CONSTANT_CLASS_P (base1)))
8768 if (code == EQ_EXPR)
8769 return omit_two_operands_loc (loc, type, boolean_false_node,
8770 arg0, arg1);
8771 else if (code == NE_EXPR)
8772 return omit_two_operands_loc (loc, type, boolean_true_node,
8773 arg0, arg1);
8775 /* For equal offsets we can simplify to a comparison of the
8776 base addresses. */
8777 else if (bitpos0 == bitpos1
8778 && (indirect_base0
8779 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8780 && (indirect_base1
8781 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8782 && ((offset0 == offset1)
8783 || (offset0 && offset1
8784 && operand_equal_p (offset0, offset1, 0))))
8786 if (indirect_base0)
8787 base0 = build_fold_addr_expr_loc (loc, base0);
8788 if (indirect_base1)
8789 base1 = build_fold_addr_expr_loc (loc, base1);
8790 return fold_build2_loc (loc, code, type, base0, base1);
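/* Added illustration (editor's note): for a local char a[8],
   &a[1] < &a[4] decomposes into equal bases with bit positions 8
   and 32, so it folds to true where pointer overflow is undefined;
   and for two distinct local arrays A and B, &a == &b folds to
   false through the non-equal-bases case.  */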
8794 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8795 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8796 the resulting offset is smaller in absolute value than the
8797 original one. */
8798 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8799 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8800 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8801 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8802 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8803 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8804 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8806 tree const1 = TREE_OPERAND (arg0, 1);
8807 tree const2 = TREE_OPERAND (arg1, 1);
8808 tree variable1 = TREE_OPERAND (arg0, 0);
8809 tree variable2 = TREE_OPERAND (arg1, 0);
8810 tree cst;
8811 const char * const warnmsg = G_("assuming signed overflow does not "
8812 "occur when combining constants around "
8813 "a comparison");
8815 /* Put the constant on the side where it doesn't overflow and is
8816 of lower absolute value than before. */
8817 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8818 ? MINUS_EXPR : PLUS_EXPR,
8819 const2, const1, 0);
8820 if (!TREE_OVERFLOW (cst)
8821 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8823 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8824 return fold_build2_loc (loc, code, type,
8825 variable1,
8826 fold_build2_loc (loc,
8827 TREE_CODE (arg1), TREE_TYPE (arg1),
8828 variable2, cst));
8831 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8832 ? MINUS_EXPR : PLUS_EXPR,
8833 const1, const2, 0);
8834 if (!TREE_OVERFLOW (cst)
8835 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8837 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8838 return fold_build2_loc (loc, code, type,
8839 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8840 variable1, cst),
8841 variable2);
8845 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8846 signed arithmetic case. That form is created by the compiler
8847 often enough for folding it to be of value. One example is in
8848 computing loop trip counts after Operator Strength Reduction. */
8849 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8850 && TREE_CODE (arg0) == MULT_EXPR
8851 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8852 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8853 && integer_zerop (arg1))
8855 tree const1 = TREE_OPERAND (arg0, 1);
8856 tree const2 = arg1; /* zero */
8857 tree variable1 = TREE_OPERAND (arg0, 0);
8858 enum tree_code cmp_code = code;
8860 /* Handle unfolded multiplication by zero. */
8861 if (integer_zerop (const1))
8862 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8864 fold_overflow_warning (("assuming signed overflow does not occur when "
8865 "eliminating multiplication in comparison "
8866 "with zero"),
8867 WARN_STRICT_OVERFLOW_COMPARISON);
8869 /* If const1 is negative we swap the sense of the comparison. */
8870 if (tree_int_cst_sgn (const1) < 0)
8871 cmp_code = swap_tree_comparison (cmp_code);
8873 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
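/* Added illustration (editor's note; assumes signed int X with
   undefined overflow): X * 4 > 0 becomes X > 0, and X * -4 > 0
   becomes X < 0, since a negative multiplier flips the comparison;
   this shape commonly appears in loop trip-count computations
   after operator strength reduction.  */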
8876 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8877 if (tem)
8878 return tem;
8880 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8882 tree targ0 = strip_float_extensions (arg0);
8883 tree targ1 = strip_float_extensions (arg1);
8884 tree newtype = TREE_TYPE (targ0);
8886 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8887 newtype = TREE_TYPE (targ1);
8889 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8890 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8891 return fold_build2_loc (loc, code, type,
8892 fold_convert_loc (loc, newtype, targ0),
8893 fold_convert_loc (loc, newtype, targ1));
8895 /* (-a) CMP (-b) -> b CMP a */
8896 if (TREE_CODE (arg0) == NEGATE_EXPR
8897 && TREE_CODE (arg1) == NEGATE_EXPR)
8898 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8899 TREE_OPERAND (arg0, 0));
8901 if (TREE_CODE (arg1) == REAL_CST)
8903 REAL_VALUE_TYPE cst;
8904 cst = TREE_REAL_CST (arg1);
8906 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8907 if (TREE_CODE (arg0) == NEGATE_EXPR)
8908 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8909 TREE_OPERAND (arg0, 0),
8910 build_real (TREE_TYPE (arg1),
8911 REAL_VALUE_NEGATE (cst)));
8913 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8914 /* a CMP (-0) -> a CMP 0 */
8915 if (REAL_VALUE_MINUS_ZERO (cst))
8916 return fold_build2_loc (loc, code, type, arg0,
8917 build_real (TREE_TYPE (arg1), dconst0));
8919 /* x != NaN is always true, other ops are always false. */
8920 if (REAL_VALUE_ISNAN (cst)
8921 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8923 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8924 return omit_one_operand_loc (loc, type, tem, arg0);
8927 /* Fold comparisons against infinity. */
8928 if (REAL_VALUE_ISINF (cst)
8929 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8931 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8932 if (tem != NULL_TREE)
8933 return tem;
8937 /* If this is a comparison of a real constant with a PLUS_EXPR
8938 or a MINUS_EXPR of a real constant, we can convert it into a
8939 comparison with a revised real constant as long as no overflow
8940 occurs when unsafe_math_optimizations are enabled. */
8941 if (flag_unsafe_math_optimizations
8942 && TREE_CODE (arg1) == REAL_CST
8943 && (TREE_CODE (arg0) == PLUS_EXPR
8944 || TREE_CODE (arg0) == MINUS_EXPR)
8945 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8946 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8947 ? MINUS_EXPR : PLUS_EXPR,
8948 arg1, TREE_OPERAND (arg0, 1), 0))
8949 && !TREE_OVERFLOW (tem))
8950 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8952 /* Likewise, we can simplify a comparison of a real constant with
8953 a MINUS_EXPR whose first operand is also a real constant, i.e.
8954 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8955 floating-point types only if -fassociative-math is set. */
8956 if (flag_associative_math
8957 && TREE_CODE (arg1) == REAL_CST
8958 && TREE_CODE (arg0) == MINUS_EXPR
8959 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8960 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8961 arg1, 0))
8962 && !TREE_OVERFLOW (tem))
8963 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8964 TREE_OPERAND (arg0, 1), tem);
8966 /* Fold comparisons against built-in math functions. */
8967 if (TREE_CODE (arg1) == REAL_CST
8968 && flag_unsafe_math_optimizations
8969 && ! flag_errno_math)
8971 enum built_in_function fcode = builtin_mathfn_code (arg0);
8973 if (fcode != END_BUILTINS)
8975 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8976 if (tem != NULL_TREE)
8977 return tem;
8982 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8983 && CONVERT_EXPR_P (arg0))
8985 /* If we are widening one operand of an integer comparison,
8986 see if the other operand is similarly being widened. Perhaps we
8987 can do the comparison in the narrower type. */
8988 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8989 if (tem)
8990 return tem;
8992 /* Or if we are changing signedness. */
8993 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8994 if (tem)
8995 return tem;
8998 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8999 constant, we can simplify it. */
9000 if (TREE_CODE (arg1) == INTEGER_CST
9001 && (TREE_CODE (arg0) == MIN_EXPR
9002 || TREE_CODE (arg0) == MAX_EXPR)
9003 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9005 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9006 if (tem)
9007 return tem;
9010 /* Simplify comparison of something with itself. (For IEEE
9011 floating-point, we can only do some of these simplifications.) */
9012 if (operand_equal_p (arg0, arg1, 0))
9014 switch (code)
9016 case EQ_EXPR:
9017 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9018 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9019 return constant_boolean_node (1, type);
9020 break;
9022 case GE_EXPR:
9023 case LE_EXPR:
9024 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9025 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9026 return constant_boolean_node (1, type);
9027 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9029 case NE_EXPR:
9030 /* For NE, we can only do this simplification if the type is integral
9031 or we don't honor IEEE floating point NaNs. */
9032 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9033 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9034 break;
9035 /* ... fall through ... */
9036 case GT_EXPR:
9037 case LT_EXPR:
9038 return constant_boolean_node (0, type);
9039 default:
9040 gcc_unreachable ();
9044 /* If we are comparing an expression that just has comparisons
9045 of two integer values, arithmetic expressions of those comparisons,
9046 and constants, we can simplify it. There are only three cases
9047 to check: the two values can either be equal, the first can be
9048 greater, or the second can be greater. Fold the expression for
9049 those three values. Since each value must be 0 or 1, we have
9050 eight possibilities, each of which corresponds to the constant 0
9051 or 1 or one of the six possible comparisons.
9053 This handles common cases like (a > b) == 0 but also handles
9054 expressions like ((x > y) - (y > x)) > 0, which supposedly
9055 occur in macroized code. */
9057 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9059 tree cval1 = 0, cval2 = 0;
9060 int save_p = 0;
9062 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9063 /* Don't handle degenerate cases here; they should already
9064 have been handled anyway. */
9065 && cval1 != 0 && cval2 != 0
9066 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9067 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9068 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9069 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9070 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9071 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9072 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9074 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9075 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9077 /* We can't just pass T to eval_subst in case cval1 or cval2
9078 was the same as ARG1. */
9080 tree high_result
9081 = fold_build2_loc (loc, code, type,
9082 eval_subst (loc, arg0, cval1, maxval,
9083 cval2, minval),
9084 arg1);
9085 tree equal_result
9086 = fold_build2_loc (loc, code, type,
9087 eval_subst (loc, arg0, cval1, maxval,
9088 cval2, maxval),
9089 arg1);
9090 tree low_result
9091 = fold_build2_loc (loc, code, type,
9092 eval_subst (loc, arg0, cval1, minval,
9093 cval2, maxval),
9094 arg1);
9096 /* All three of these results should be 0 or 1. Confirm they are.
9097 Then use those values to select the proper code to use. */
9099 if (TREE_CODE (high_result) == INTEGER_CST
9100 && TREE_CODE (equal_result) == INTEGER_CST
9101 && TREE_CODE (low_result) == INTEGER_CST)
9103 /* Make a 3-bit mask with the high-order bit being the
9104 value for `>', the next for `=', and the low for `<'. */
9105 switch ((integer_onep (high_result) * 4)
9106 + (integer_onep (equal_result) * 2)
9107 + integer_onep (low_result))
9109 case 0:
9110 /* Always false. */
9111 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9112 case 1:
9113 code = LT_EXPR;
9114 break;
9115 case 2:
9116 code = EQ_EXPR;
9117 break;
9118 case 3:
9119 code = LE_EXPR;
9120 break;
9121 case 4:
9122 code = GT_EXPR;
9123 break;
9124 case 5:
9125 code = NE_EXPR;
9126 break;
9127 case 6:
9128 code = GE_EXPR;
9129 break;
9130 case 7:
9131 /* Always true. */
9132 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9135 if (save_p)
9137 tem = save_expr (build2 (code, type, cval1, cval2));
9138 SET_EXPR_LOCATION (tem, loc);
9139 return tem;
9141 return fold_build2_loc (loc, code, type, cval1, cval2);
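/* Illustrative sketch (editor's addition): evaluating ARG0 for the
   three orderings of the two values collapses comparison arithmetic:

     int f (int x, int y) { return (x > y) == 0; }             // -> x <= y
     int g (int x, int y) { return ((x > y) - (y > x)) > 0; }  // -> x > y

   each inner comparison is 0 or 1, so the results for x < y, x == y
   and x > y form the 3-bit mask that selects the final code above.  */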
9146 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9147 into a single range test. */
9148 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9149 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9150 && TREE_CODE (arg1) == INTEGER_CST
9151 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9152 && !integer_zerop (TREE_OPERAND (arg0, 1))
9153 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9154 && !TREE_OVERFLOW (arg1))
9156 tem = fold_div_compare (loc, code, type, arg0, arg1);
9157 if (tem != NULL_TREE)
9158 return tem;
9161 /* Fold ~X op ~Y as Y op X. */
9162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9163 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9165 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9166 return fold_build2_loc (loc, code, type,
9167 fold_convert_loc (loc, cmp_type,
9168 TREE_OPERAND (arg1, 0)),
9169 TREE_OPERAND (arg0, 0));
9172 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9173 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9174 && TREE_CODE (arg1) == INTEGER_CST)
9176 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9177 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9178 TREE_OPERAND (arg0, 0),
9179 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9180 fold_convert_loc (loc, cmp_type, arg1)));
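/* Illustrative sketch (editor's addition): the two BIT_NOT folds above
   in source form:

     int f (unsigned x, unsigned y) { return ~x < ~y; }  // -> y < x
     int g (unsigned x) { return ~x == 5u; }             // -> x == ~5u

   complementing reverses the unsigned order, so the operands (or the
   constant) can be un-complemented if the comparison is swapped.  */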
9183 return NULL_TREE;
9187 /* Subroutine of fold_binary. Optimize complex multiplications of the
9188 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9189 argument EXPR represents the expression "z" of type TYPE. */
9191 static tree
9192 fold_mult_zconjz (location_t loc, tree type, tree expr)
9194 tree itype = TREE_TYPE (type);
9195 tree rpart, ipart, tem;
9197 if (TREE_CODE (expr) == COMPLEX_EXPR)
9199 rpart = TREE_OPERAND (expr, 0);
9200 ipart = TREE_OPERAND (expr, 1);
9202 else if (TREE_CODE (expr) == COMPLEX_CST)
9204 rpart = TREE_REALPART (expr);
9205 ipart = TREE_IMAGPART (expr);
9207 else
9209 expr = save_expr (expr);
9210 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9211 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9214 rpart = save_expr (rpart);
9215 ipart = save_expr (ipart);
9216 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9217 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9218 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9219 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9220 fold_convert_loc (loc, itype, integer_zero_node));
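/* Illustrative sketch (editor's addition): for z = a + b*i the routine
   implements z * conj(z) = a*a + b*b + 0i, so e.g.

     #include <complex.h>
     double norm2 (double complex z)
     {
       return creal (z * conj (z));   // -> creal(z)^2 + cimag(z)^2
     }

   avoids a full complex multiplication for the common |z|^2 idiom
   (guarded for floats by -funsafe-math-optimizations, see below).  */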
9224 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9225 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9226 guarantees that P and N have the same least significant log2(M) bits.
9227 N is not otherwise constrained. In particular, N is not normalized to
9228 0 <= N < M as is common. In general, the precise value of P is unknown.
9229 M is chosen as large as possible such that constant N can be determined.
9231 Returns M and sets *RESIDUE to N.
9233 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9234 account. This is not always possible due to PR 35705.
9237 static unsigned HOST_WIDE_INT
9238 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9239 bool allow_func_align)
9241 enum tree_code code;
9243 *residue = 0;
9245 code = TREE_CODE (expr);
9246 if (code == ADDR_EXPR)
9248 expr = TREE_OPERAND (expr, 0);
9249 if (handled_component_p (expr))
9251 HOST_WIDE_INT bitsize, bitpos;
9252 tree offset;
9253 enum machine_mode mode;
9254 int unsignedp, volatilep;
9256 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9257 &mode, &unsignedp, &volatilep, false);
9258 *residue = bitpos / BITS_PER_UNIT;
9259 if (offset)
9261 if (TREE_CODE (offset) == INTEGER_CST)
9262 *residue += TREE_INT_CST_LOW (offset);
9263 else
9264 /* We don't handle more complicated offset expressions. */
9265 return 1;
9269 if (DECL_P (expr)
9270 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9271 return DECL_ALIGN_UNIT (expr);
9273 else if (code == POINTER_PLUS_EXPR)
9275 tree op0, op1;
9276 unsigned HOST_WIDE_INT modulus;
9277 enum tree_code inner_code;
9279 op0 = TREE_OPERAND (expr, 0);
9280 STRIP_NOPS (op0);
9281 modulus = get_pointer_modulus_and_residue (op0, residue,
9282 allow_func_align);
9284 op1 = TREE_OPERAND (expr, 1);
9285 STRIP_NOPS (op1);
9286 inner_code = TREE_CODE (op1);
9287 if (inner_code == INTEGER_CST)
9289 *residue += TREE_INT_CST_LOW (op1);
9290 return modulus;
9292 else if (inner_code == MULT_EXPR)
9294 op1 = TREE_OPERAND (op1, 1);
9295 if (TREE_CODE (op1) == INTEGER_CST)
9297 unsigned HOST_WIDE_INT align;
9299 /* Compute the greatest power-of-2 divisor of op1. */
9300 align = TREE_INT_CST_LOW (op1);
9301 align &= -align;
9303 /* If align is non-zero and less than modulus, replace
9304 modulus with align. If align is 0, then either op1 is 0
9305 or the greatest power-of-2 divisor of op1 doesn't fit in an
9306 unsigned HOST_WIDE_INT. In either case, no additional
9307 constraint is imposed. */
9308 if (align)
9309 modulus = MIN (modulus, align);
9311 return modulus;
9316 /* If we get here, we were unable to determine anything useful about the
9317 expression. */
9318 return 1;
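/* Illustrative sketch (editor's addition): assuming a declaration like

     static int buf[8] __attribute__ ((aligned (16)));

   then for EXPR == &buf[3] this function would return M = 16 (from the
   decl's alignment) and set *RESIDUE = 12 (3 * sizeof (int)): the low
   log2(16) bits of the pointer value are known to equal 12.  */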
9322 /* Fold a binary expression of code CODE and type TYPE with operands
9323 OP0 and OP1. LOC is the location of the resulting expression.
9324 Return the folded expression if folding is successful. Otherwise,
9325 return NULL_TREE. */
9327 tree
9328 fold_binary_loc (location_t loc,
9329 enum tree_code code, tree type, tree op0, tree op1)
9331 enum tree_code_class kind = TREE_CODE_CLASS (code);
9332 tree arg0, arg1, tem;
9333 tree t1 = NULL_TREE;
9334 bool strict_overflow_p;
9336 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9337 && TREE_CODE_LENGTH (code) == 2
9338 && op0 != NULL_TREE
9339 && op1 != NULL_TREE);
9341 arg0 = op0;
9342 arg1 = op1;
9344 /* Strip any conversions that don't change the mode. This is
9345 safe for every expression, except for a comparison expression
9346 because its signedness is derived from its operands. So, in
9347 the latter case, only strip conversions that don't change the
9348 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9349 preserved.
9351 Note that this is done as an internal manipulation within the
9352 constant folder, in order to find the simplest representation
9353 of the arguments so that their form can be studied. In any
9354 case, the appropriate type conversions should be put back in
9355 the tree that will get out of the constant folder. */
9357 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9359 STRIP_SIGN_NOPS (arg0);
9360 STRIP_SIGN_NOPS (arg1);
9362 else
9364 STRIP_NOPS (arg0);
9365 STRIP_NOPS (arg1);
9368 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9369 constant but we can't do arithmetic on them. */
9370 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9371 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9372 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9373 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9374 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9375 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9377 if (kind == tcc_binary)
9379 /* Make sure type and arg0 have the same saturating flag. */
9380 gcc_assert (TYPE_SATURATING (type)
9381 == TYPE_SATURATING (TREE_TYPE (arg0)));
9382 tem = const_binop (code, arg0, arg1, 0);
9384 else if (kind == tcc_comparison)
9385 tem = fold_relational_const (code, type, arg0, arg1);
9386 else
9387 tem = NULL_TREE;
9389 if (tem != NULL_TREE)
9391 if (TREE_TYPE (tem) != type)
9392 tem = fold_convert_loc (loc, type, tem);
9393 return tem;
9397 /* If this is a commutative operation, and ARG0 is a constant, move it
9398 to ARG1 to reduce the number of tests below. */
9399 if (commutative_tree_code (code)
9400 && tree_swap_operands_p (arg0, arg1, true))
9401 return fold_build2_loc (loc, code, type, op1, op0);
9403 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9405 First check for cases where an arithmetic operation is applied to a
9406 compound, conditional, or comparison operation. Push the arithmetic
9407 operation inside the compound or conditional to see if any folding
9408 can then be done. Convert comparison to conditional for this purpose.
9409 This also optimizes non-constant cases that used to be done in
9410 expand_expr.
9412 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9413 one of the operands is a comparison and the other is a comparison, a
9414 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9415 code below would make the expression more complex. Change it to a
9416 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9417 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9419 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9420 || code == EQ_EXPR || code == NE_EXPR)
9421 && ((truth_value_p (TREE_CODE (arg0))
9422 && (truth_value_p (TREE_CODE (arg1))
9423 || (TREE_CODE (arg1) == BIT_AND_EXPR
9424 && integer_onep (TREE_OPERAND (arg1, 1)))))
9425 || (truth_value_p (TREE_CODE (arg1))
9426 && (truth_value_p (TREE_CODE (arg0))
9427 || (TREE_CODE (arg0) == BIT_AND_EXPR
9428 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9430 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9431 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9432 : TRUTH_XOR_EXPR,
9433 boolean_type_node,
9434 fold_convert_loc (loc, boolean_type_node, arg0),
9435 fold_convert_loc (loc, boolean_type_node, arg1));
9437 if (code == EQ_EXPR)
9438 tem = invert_truthvalue_loc (loc, tem);
9440 return fold_convert_loc (loc, type, tem);
9443 if (TREE_CODE_CLASS (code) == tcc_binary
9444 || TREE_CODE_CLASS (code) == tcc_comparison)
9446 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9448 tem = fold_build2_loc (loc, code, type,
9449 fold_convert_loc (loc, TREE_TYPE (op0),
9450 TREE_OPERAND (arg0, 1)), op1);
9451 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9452 goto fold_binary_exit;
9454 if (TREE_CODE (arg1) == COMPOUND_EXPR
9455 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9457 tem = fold_build2_loc (loc, code, type, op0,
9458 fold_convert_loc (loc, TREE_TYPE (op1),
9459 TREE_OPERAND (arg1, 1)));
9460 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9461 goto fold_binary_exit;
9464 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9466 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9467 arg0, arg1,
9468 /*cond_first_p=*/1);
9469 if (tem != NULL_TREE)
9470 return tem;
9473 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9475 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9476 arg1, arg0,
9477 /*cond_first_p=*/0);
9478 if (tem != NULL_TREE)
9479 return tem;
9483 switch (code)
9485 case POINTER_PLUS_EXPR:
9486 /* 0 +p index -> (type)index */
9487 if (integer_zerop (arg0))
9488 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9490 /* PTR +p 0 -> PTR */
9491 if (integer_zerop (arg1))
9492 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9494 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9495 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9496 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9497 return fold_convert_loc (loc, type,
9498 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9499 fold_convert_loc (loc, sizetype,
9500 arg1),
9501 fold_convert_loc (loc, sizetype,
9502 arg0)));
9504 /* index +p PTR -> PTR +p index */
9505 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9506 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9507 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9508 fold_convert_loc (loc, type, arg1),
9509 fold_convert_loc (loc, sizetype, arg0));
9511 /* (PTR +p B) +p A -> PTR +p (B + A) */
9512 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9514 tree inner;
9515 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9516 tree arg00 = TREE_OPERAND (arg0, 0);
9517 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9518 arg01, fold_convert_loc (loc, sizetype, arg1));
9519 return fold_convert_loc (loc, type,
9520 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9521 TREE_TYPE (arg00),
9522 arg00, inner));
9525 /* PTR_CST +p CST -> CST1 */
9526 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9527 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9528 fold_convert_loc (loc, type, arg1));
9530 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9531 of the array. The loop optimizer sometimes produces this type of
9532 expression. */
9533 if (TREE_CODE (arg0) == ADDR_EXPR)
9535 tem = try_move_mult_to_index (loc, arg0,
9536 fold_convert_loc (loc, sizetype, arg1));
9537 if (tem)
9538 return fold_convert_loc (loc, type, tem);
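/* Illustrative sketch (editor's addition): the &a[i1] +p c * i2 case in
   source form, where c is the element size:

     static int a[16];
     int *f (int i, int j) { return &a[i] + j; }   // -> &a[i + j]

   pointer increments like this come out of the loop optimizer, and
   moving the multiplication back into the index keeps a single
   ARRAY_REF for later passes.  */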
9541 return NULL_TREE;
9543 case PLUS_EXPR:
9544 /* A + (-B) -> A - B */
9545 if (TREE_CODE (arg1) == NEGATE_EXPR)
9546 return fold_build2_loc (loc, MINUS_EXPR, type,
9547 fold_convert_loc (loc, type, arg0),
9548 fold_convert_loc (loc, type,
9549 TREE_OPERAND (arg1, 0)));
9550 /* (-A) + B -> B - A */
9551 if (TREE_CODE (arg0) == NEGATE_EXPR
9552 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9553 return fold_build2_loc (loc, MINUS_EXPR, type,
9554 fold_convert_loc (loc, type, arg1),
9555 fold_convert_loc (loc, type,
9556 TREE_OPERAND (arg0, 0)));
9558 if (INTEGRAL_TYPE_P (type))
9560 /* Convert ~A + 1 to -A. */
9561 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9562 && integer_onep (arg1))
9563 return fold_build1_loc (loc, NEGATE_EXPR, type,
9564 fold_convert_loc (loc, type,
9565 TREE_OPERAND (arg0, 0)));
9567 /* ~X + X is -1. */
9568 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9569 && !TYPE_OVERFLOW_TRAPS (type))
9571 tree tem = TREE_OPERAND (arg0, 0);
9573 STRIP_NOPS (tem);
9574 if (operand_equal_p (tem, arg1, 0))
9576 t1 = build_int_cst_type (type, -1);
9577 return omit_one_operand_loc (loc, type, t1, arg1);
9581 /* X + ~X is -1. */
9582 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9583 && !TYPE_OVERFLOW_TRAPS (type))
9585 tree tem = TREE_OPERAND (arg1, 0);
9587 STRIP_NOPS (tem);
9588 if (operand_equal_p (arg0, tem, 0))
9590 t1 = build_int_cst_type (type, -1);
9591 return omit_one_operand_loc (loc, type, t1, arg0);
9595 /* X + (X / CST) * -CST is X % CST. */
9596 if (TREE_CODE (arg1) == MULT_EXPR
9597 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9598 && operand_equal_p (arg0,
9599 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9601 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9602 tree cst1 = TREE_OPERAND (arg1, 1);
9603 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9604 cst1, cst0);
9605 if (sum && integer_zerop (sum))
9606 return fold_convert_loc (loc, type,
9607 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9608 TREE_TYPE (arg0), arg0,
9609 cst0));
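/* Illustrative sketch (editor's addition): the X + (X / CST) * -CST
   fold in source form:

     int f (int x) { return x + (x / 16) * -16; }   // -> x % 16

   valid for truncating division, where x == (x / c) * c + x % c.  */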
9613 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9614 same or one. Make sure type is not saturating.
9615 fold_plusminus_mult_expr will re-associate. */
9616 if ((TREE_CODE (arg0) == MULT_EXPR
9617 || TREE_CODE (arg1) == MULT_EXPR)
9618 && !TYPE_SATURATING (type)
9619 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9621 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9622 if (tem)
9623 return tem;
9626 if (! FLOAT_TYPE_P (type))
9628 if (integer_zerop (arg1))
9629 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9631 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9632 with a constant, and the two constants have no bits in common,
9633 we should treat this as a BIT_IOR_EXPR since this may produce more
9634 simplifications. */
9635 if (TREE_CODE (arg0) == BIT_AND_EXPR
9636 && TREE_CODE (arg1) == BIT_AND_EXPR
9637 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9638 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9639 && integer_zerop (const_binop (BIT_AND_EXPR,
9640 TREE_OPERAND (arg0, 1),
9641 TREE_OPERAND (arg1, 1), 0)))
9643 code = BIT_IOR_EXPR;
9644 goto bit_ior;
9647 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9648 (plus (plus (mult) (mult)) (foo)) so that we can
9649 take advantage of the factoring cases below. */
9650 if (((TREE_CODE (arg0) == PLUS_EXPR
9651 || TREE_CODE (arg0) == MINUS_EXPR)
9652 && TREE_CODE (arg1) == MULT_EXPR)
9653 || ((TREE_CODE (arg1) == PLUS_EXPR
9654 || TREE_CODE (arg1) == MINUS_EXPR)
9655 && TREE_CODE (arg0) == MULT_EXPR))
9657 tree parg0, parg1, parg, marg;
9658 enum tree_code pcode;
9660 if (TREE_CODE (arg1) == MULT_EXPR)
9661 parg = arg0, marg = arg1;
9662 else
9663 parg = arg1, marg = arg0;
9664 pcode = TREE_CODE (parg);
9665 parg0 = TREE_OPERAND (parg, 0);
9666 parg1 = TREE_OPERAND (parg, 1);
9667 STRIP_NOPS (parg0);
9668 STRIP_NOPS (parg1);
9670 if (TREE_CODE (parg0) == MULT_EXPR
9671 && TREE_CODE (parg1) != MULT_EXPR)
9672 return fold_build2_loc (loc, pcode, type,
9673 fold_build2_loc (loc, PLUS_EXPR, type,
9674 fold_convert_loc (loc, type,
9675 parg0),
9676 fold_convert_loc (loc, type,
9677 marg)),
9678 fold_convert_loc (loc, type, parg1));
9679 if (TREE_CODE (parg0) != MULT_EXPR
9680 && TREE_CODE (parg1) == MULT_EXPR)
9681 return
9682 fold_build2_loc (loc, PLUS_EXPR, type,
9683 fold_convert_loc (loc, type, parg0),
9684 fold_build2_loc (loc, pcode, type,
9685 fold_convert_loc (loc, type, marg),
9686 fold_convert_loc (loc, type,
9687 parg1)));
9690 else
9692 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9693 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9694 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9696 /* Likewise if the operands are reversed. */
9697 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9698 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9700 /* Convert X + -C into X - C. */
9701 if (TREE_CODE (arg1) == REAL_CST
9702 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9704 tem = fold_negate_const (arg1, type);
9705 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9706 return fold_build2_loc (loc, MINUS_EXPR, type,
9707 fold_convert_loc (loc, type, arg0),
9708 fold_convert_loc (loc, type, tem));
9711 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9712 to __complex__ ( x, y ). This is not the same for SNaNs or
9713 if signed zeros are involved. */
9714 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9715 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9716 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9718 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9719 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9720 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9721 bool arg0rz = false, arg0iz = false;
9722 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9723 || (arg0i && (arg0iz = real_zerop (arg0i))))
9725 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9726 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9727 if (arg0rz && arg1i && real_zerop (arg1i))
9729 tree rp = arg1r ? arg1r
9730 : build1 (REALPART_EXPR, rtype, arg1);
9731 tree ip = arg0i ? arg0i
9732 : build1 (IMAGPART_EXPR, rtype, arg0);
9733 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9735 else if (arg0iz && arg1r && real_zerop (arg1r))
9737 tree rp = arg0r ? arg0r
9738 : build1 (REALPART_EXPR, rtype, arg0);
9739 tree ip = arg1i ? arg1i
9740 : build1 (IMAGPART_EXPR, rtype, arg1);
9741 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9746 if (flag_unsafe_math_optimizations
9747 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9748 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9749 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9750 return tem;
9752 /* Convert x+x into x*2.0. */
9753 if (operand_equal_p (arg0, arg1, 0)
9754 && SCALAR_FLOAT_TYPE_P (type))
9755 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9756 build_real (type, dconst2));
9758 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9759 We associate floats only if the user has specified
9760 -fassociative-math. */
9761 if (flag_associative_math
9762 && TREE_CODE (arg1) == PLUS_EXPR
9763 && TREE_CODE (arg0) != MULT_EXPR)
9765 tree tree10 = TREE_OPERAND (arg1, 0);
9766 tree tree11 = TREE_OPERAND (arg1, 1);
9767 if (TREE_CODE (tree11) == MULT_EXPR
9768 && TREE_CODE (tree10) == MULT_EXPR)
9770 tree tree0;
9771 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9772 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9775 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9776 We associate floats only if the user has specified
9777 -fassociative-math. */
9778 if (flag_associative_math
9779 && TREE_CODE (arg0) == PLUS_EXPR
9780 && TREE_CODE (arg1) != MULT_EXPR)
9782 tree tree00 = TREE_OPERAND (arg0, 0);
9783 tree tree01 = TREE_OPERAND (arg0, 1);
9784 if (TREE_CODE (tree01) == MULT_EXPR
9785 && TREE_CODE (tree00) == MULT_EXPR)
9787 tree tree0;
9788 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9789 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9794 bit_rotate:
9795 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9796 is a rotate of A by C1 bits. */
9797 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9798 is a rotate of A by B bits. */
9800 enum tree_code code0, code1;
9801 tree rtype;
9802 code0 = TREE_CODE (arg0);
9803 code1 = TREE_CODE (arg1);
9804 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9805 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9806 && operand_equal_p (TREE_OPERAND (arg0, 0),
9807 TREE_OPERAND (arg1, 0), 0)
9808 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9809 TYPE_UNSIGNED (rtype))
9810 /* Only create rotates in complete modes. Other cases are not
9811 expanded properly. */
9812 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9814 tree tree01, tree11;
9815 enum tree_code code01, code11;
9817 tree01 = TREE_OPERAND (arg0, 1);
9818 tree11 = TREE_OPERAND (arg1, 1);
9819 STRIP_NOPS (tree01);
9820 STRIP_NOPS (tree11);
9821 code01 = TREE_CODE (tree01);
9822 code11 = TREE_CODE (tree11);
9823 if (code01 == INTEGER_CST
9824 && code11 == INTEGER_CST
9825 && TREE_INT_CST_HIGH (tree01) == 0
9826 && TREE_INT_CST_HIGH (tree11) == 0
9827 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9828 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9830 tem = build2 (LROTATE_EXPR,
9831 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9832 TREE_OPERAND (arg0, 0),
9833 code0 == LSHIFT_EXPR
9834 ? tree01 : tree11);
9835 SET_EXPR_LOCATION (tem, loc);
9836 return fold_convert_loc (loc, type, tem);
9838 else if (code11 == MINUS_EXPR)
9840 tree tree110, tree111;
9841 tree110 = TREE_OPERAND (tree11, 0);
9842 tree111 = TREE_OPERAND (tree11, 1);
9843 STRIP_NOPS (tree110);
9844 STRIP_NOPS (tree111);
9845 if (TREE_CODE (tree110) == INTEGER_CST
9846 && 0 == compare_tree_int (tree110,
9847 TYPE_PRECISION
9848 (TREE_TYPE (TREE_OPERAND
9849 (arg0, 0))))
9850 && operand_equal_p (tree01, tree111, 0))
9851 return
9852 fold_convert_loc (loc, type,
9853 build2 ((code0 == LSHIFT_EXPR
9854 ? LROTATE_EXPR
9855 : RROTATE_EXPR),
9856 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9857 TREE_OPERAND (arg0, 0), tree01));
9859 else if (code01 == MINUS_EXPR)
9861 tree tree010, tree011;
9862 tree010 = TREE_OPERAND (tree01, 0);
9863 tree011 = TREE_OPERAND (tree01, 1);
9864 STRIP_NOPS (tree010);
9865 STRIP_NOPS (tree011);
9866 if (TREE_CODE (tree010) == INTEGER_CST
9867 && 0 == compare_tree_int (tree010,
9868 TYPE_PRECISION
9869 (TREE_TYPE (TREE_OPERAND
9870 (arg0, 0))))
9871 && operand_equal_p (tree11, tree011, 0))
9872 return fold_convert_loc
9873 (loc, type,
9874 build2 ((code0 != LSHIFT_EXPR
9875 ? LROTATE_EXPR
9876 : RROTATE_EXPR),
9877 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9878 TREE_OPERAND (arg0, 0), tree11));
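/* Illustrative sketch (editor's addition): these shift-pair patterns
   recognize the portable rotate idiom (also reached from BIT_IOR_EXPR
   via the bit_rotate label), e.g.

     unsigned rotl (unsigned a, unsigned b)
     {
       return (a << b) | (a >> (32 - b));   // -> rotate left by b
     }

   assuming a 32-bit unsigned whose precision matches its mode, and
   0 < b < 32 so neither shift is undefined.  */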
9883 associate:
9884 /* In most languages, we can't associate operations on floats through
9885 parentheses. Rather than remember where the parentheses were, we
9886 don't associate floats at all, unless the user has specified
9887 -fassociative-math.
9888 And, we need to make sure type is not saturating. */
9890 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9891 && !TYPE_SATURATING (type))
9893 tree var0, con0, lit0, minus_lit0;
9894 tree var1, con1, lit1, minus_lit1;
9895 bool ok = true;
9897 /* Split both trees into variables, constants, and literals. Then
9898 associate each group together, the constants with literals,
9899 then the result with variables. This increases the chances of
9900 literals being recombined later and of generating relocatable
9901 expressions for the sum of a constant and literal. */
9902 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9903 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9904 code == MINUS_EXPR);
9906 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9907 if (code == MINUS_EXPR)
9908 code = PLUS_EXPR;
9910 /* With undefined overflow we can only associate constants with one
9911 variable, and constants whose association doesn't overflow. */
9912 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9913 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9915 if (var0 && var1)
9917 tree tmp0 = var0;
9918 tree tmp1 = var1;
9920 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9921 tmp0 = TREE_OPERAND (tmp0, 0);
9922 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9923 tmp1 = TREE_OPERAND (tmp1, 0);
9924 /* The only case we can still associate with two variables
9925 is if they are the same, modulo negation. */
9926 if (!operand_equal_p (tmp0, tmp1, 0))
9927 ok = false;
9930 if (ok && lit0 && lit1)
9932 tree tmp0 = fold_convert (type, lit0);
9933 tree tmp1 = fold_convert (type, lit1);
9935 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9936 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9937 ok = false;
9941 /* Only do something if we found more than two objects. Otherwise,
9942 nothing has changed and we risk infinite recursion. */
9943 if (ok
9944 && (2 < ((var0 != 0) + (var1 != 0)
9945 + (con0 != 0) + (con1 != 0)
9946 + (lit0 != 0) + (lit1 != 0)
9947 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9949 var0 = associate_trees (loc, var0, var1, code, type);
9950 con0 = associate_trees (loc, con0, con1, code, type);
9951 lit0 = associate_trees (loc, lit0, lit1, code, type);
9952 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9954 /* Preserve the MINUS_EXPR if the negative part of the literal is
9955 greater than the positive part. Otherwise, the multiplicative
9956 folding code (i.e. extract_muldiv) may be fooled in case
9957 unsigned constants are subtracted, like in the following
9958 example: ((X*2 + 4) - 8U)/2. */
9959 if (minus_lit0 && lit0)
9961 if (TREE_CODE (lit0) == INTEGER_CST
9962 && TREE_CODE (minus_lit0) == INTEGER_CST
9963 && tree_int_cst_lt (lit0, minus_lit0))
9965 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9966 MINUS_EXPR, type);
9967 lit0 = 0;
9969 else
9971 lit0 = associate_trees (loc, lit0, minus_lit0,
9972 MINUS_EXPR, type);
9973 minus_lit0 = 0;
9976 if (minus_lit0)
9978 if (con0 == 0)
9979 return
9980 fold_convert_loc (loc, type,
9981 associate_trees (loc, var0, minus_lit0,
9982 MINUS_EXPR, type));
9983 else
9985 con0 = associate_trees (loc, con0, minus_lit0,
9986 MINUS_EXPR, type);
9987 return
9988 fold_convert_loc (loc, type,
9989 associate_trees (loc, var0, con0,
9990 PLUS_EXPR, type));
9994 con0 = associate_trees (loc, con0, lit0, code, type);
9995 return
9996 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9997 code, type));
10001 return NULL_TREE;
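/* Illustrative sketch (editor's addition): the associate logic above
   regroups variable and literal parts, e.g.

     int f (int x) { return (x + 1) + (x + 2); }   // -> (x + x) + 3

   split_tree divides each operand into variables, constants and
   literals; the literals are combined first so that relocatable
   constant + literal sums can still be recombined later.  */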
10003 case MINUS_EXPR:
10004 /* Pointer simplifications for subtraction, simple reassociations. */
10005 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10007 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10008 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10009 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10011 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10012 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10013 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10014 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10015 return fold_build2_loc (loc, PLUS_EXPR, type,
10016 fold_build2_loc (loc, MINUS_EXPR, type,
10017 arg00, arg10),
10018 fold_build2_loc (loc, MINUS_EXPR, type,
10019 arg01, arg11));
10021 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10022 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10024 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10025 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10026 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10027 fold_convert_loc (loc, type, arg1));
10028 if (tmp)
10029 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10032 /* A - (-B) -> A + B */
10033 if (TREE_CODE (arg1) == NEGATE_EXPR)
10034 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10035 fold_convert_loc (loc, type,
10036 TREE_OPERAND (arg1, 0)));
10037 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10038 if (TREE_CODE (arg0) == NEGATE_EXPR
10039 && (FLOAT_TYPE_P (type)
10040 || INTEGRAL_TYPE_P (type))
10041 && negate_expr_p (arg1)
10042 && reorder_operands_p (arg0, arg1))
10043 return fold_build2_loc (loc, MINUS_EXPR, type,
10044 fold_convert_loc (loc, type,
10045 negate_expr (arg1)),
10046 fold_convert_loc (loc, type,
10047 TREE_OPERAND (arg0, 0)));
10048 /* Convert -A - 1 to ~A. */
10049 if (INTEGRAL_TYPE_P (type)
10050 && TREE_CODE (arg0) == NEGATE_EXPR
10051 && integer_onep (arg1)
10052 && !TYPE_OVERFLOW_TRAPS (type))
10053 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10054 fold_convert_loc (loc, type,
10055 TREE_OPERAND (arg0, 0)));
10057 /* Convert -1 - A to ~A. */
10058 if (INTEGRAL_TYPE_P (type)
10059 && integer_all_onesp (arg0))
10060 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10063 /* X - (X / CST) * CST is X % CST. */
10064 if (INTEGRAL_TYPE_P (type)
10065 && TREE_CODE (arg1) == MULT_EXPR
10066 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10067 && operand_equal_p (arg0,
10068 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10069 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10070 TREE_OPERAND (arg1, 1), 0))
10071 return
10072 fold_convert_loc (loc, type,
10073 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10074 arg0, TREE_OPERAND (arg1, 1)));
10076 if (! FLOAT_TYPE_P (type))
10078 if (integer_zerop (arg0))
10079 return negate_expr (fold_convert_loc (loc, type, arg1));
10080 if (integer_zerop (arg1))
10081 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10083 /* Fold A - (A & B) into ~B & A. */
10084 if (!TREE_SIDE_EFFECTS (arg0)
10085 && TREE_CODE (arg1) == BIT_AND_EXPR)
10087 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10089 tree arg10 = fold_convert_loc (loc, type,
10090 TREE_OPERAND (arg1, 0));
10091 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10092 fold_build1_loc (loc, BIT_NOT_EXPR,
10093 type, arg10),
10094 fold_convert_loc (loc, type, arg0));
10096 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10098 tree arg11 = fold_convert_loc (loc,
10099 type, TREE_OPERAND (arg1, 1));
10100 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10101 fold_build1_loc (loc, BIT_NOT_EXPR,
10102 type, arg11),
10103 fold_convert_loc (loc, type, arg0));
10107 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10108 any power of 2 minus 1. */
10109 if (TREE_CODE (arg0) == BIT_AND_EXPR
10110 && TREE_CODE (arg1) == BIT_AND_EXPR
10111 && operand_equal_p (TREE_OPERAND (arg0, 0),
10112 TREE_OPERAND (arg1, 0), 0))
10114 tree mask0 = TREE_OPERAND (arg0, 1);
10115 tree mask1 = TREE_OPERAND (arg1, 1);
10116 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10118 if (operand_equal_p (tem, mask1, 0))
10120 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10121 TREE_OPERAND (arg0, 0), mask1);
10122 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10127 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10128 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10129 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10131 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10132 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10133 (-ARG1 + ARG0) reduces to -ARG1. */
10134 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10135 return negate_expr (fold_convert_loc (loc, type, arg1));
10137 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10138 __complex__ ( x, -y ). This is not the same for SNaNs or if
10139 signed zeros are involved. */
10140 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10141 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10142 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10144 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10145 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10146 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10147 bool arg0rz = false, arg0iz = false;
10148 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10149 || (arg0i && (arg0iz = real_zerop (arg0i))))
10151 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10152 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10153 if (arg0rz && arg1i && real_zerop (arg1i))
10155 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10156 arg1r ? arg1r
10157 : build1 (REALPART_EXPR, rtype, arg1));
10158 tree ip = arg0i ? arg0i
10159 : build1 (IMAGPART_EXPR, rtype, arg0);
10160 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10162 else if (arg0iz && arg1r && real_zerop (arg1r))
10164 tree rp = arg0r ? arg0r
10165 : build1 (REALPART_EXPR, rtype, arg0);
10166 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10167 arg1i ? arg1i
10168 : build1 (IMAGPART_EXPR, rtype, arg1));
10169 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10174 /* Fold &x - &x. This can happen from &x.foo - &x.
10175 This is unsafe for certain floats even in non-IEEE formats.
10176 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10177 Also note that operand_equal_p is always false if an operand
10178 is volatile. */
10180 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10181 && operand_equal_p (arg0, arg1, 0))
10182 return fold_convert_loc (loc, type, integer_zero_node);
10184 /* A - B -> A + (-B) if B is easily negatable. */
10185 if (negate_expr_p (arg1)
10186 && ((FLOAT_TYPE_P (type)
10187 /* Avoid this transformation if B is a positive REAL_CST. */
10188 && (TREE_CODE (arg1) != REAL_CST
10189 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10190 || INTEGRAL_TYPE_P (type)))
10191 return fold_build2_loc (loc, PLUS_EXPR, type,
10192 fold_convert_loc (loc, type, arg0),
10193 fold_convert_loc (loc, type,
10194 negate_expr (arg1)));
10196 /* Try folding difference of addresses. */
10198 HOST_WIDE_INT diff;
10200 if ((TREE_CODE (arg0) == ADDR_EXPR
10201 || TREE_CODE (arg1) == ADDR_EXPR)
10202 && ptr_difference_const (arg0, arg1, &diff))
10203 return build_int_cst_type (type, diff);
10206 /* Fold &a[i] - &a[j] to i-j. */
10207 if (TREE_CODE (arg0) == ADDR_EXPR
10208 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10209 && TREE_CODE (arg1) == ADDR_EXPR
10210 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10212 tree aref0 = TREE_OPERAND (arg0, 0);
10213 tree aref1 = TREE_OPERAND (arg1, 0);
10214 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10215 TREE_OPERAND (aref1, 0), 0))
10217 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10218 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10219 tree esz = array_ref_element_size (aref0);
10220 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10221 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10222 fold_convert_loc (loc, type, esz));
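/* Illustrative sketch (editor's addition): the &a[i] - &a[j] fold in
   source form:

     static int a[16];
     long f (int i, int j) { return &a[i] - &a[j]; }   // -> i - j

   the fold above produces (i - j) * sizeof (int) as the byte
   difference; the division by the element size that C pointer
   subtraction implies then cancels it, leaving i - j.  */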
10227 if (FLOAT_TYPE_P (type)
10228 && flag_unsafe_math_optimizations
10229 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10230 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10231 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10232 return tem;
10234 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10235 same or one. Make sure type is not saturating.
10236 fold_plusminus_mult_expr will re-associate. */
10237 if ((TREE_CODE (arg0) == MULT_EXPR
10238 || TREE_CODE (arg1) == MULT_EXPR)
10239 && !TYPE_SATURATING (type)
10240 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10242 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10243 if (tem)
10244 return tem;
10247 goto associate;
10249 case MULT_EXPR:
10250 /* (-A) * (-B) -> A * B */
10251 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10252 return fold_build2_loc (loc, MULT_EXPR, type,
10253 fold_convert_loc (loc, type,
10254 TREE_OPERAND (arg0, 0)),
10255 fold_convert_loc (loc, type,
10256 negate_expr (arg1)));
10257 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10258 return fold_build2_loc (loc, MULT_EXPR, type,
10259 fold_convert_loc (loc, type,
10260 negate_expr (arg0)),
10261 fold_convert_loc (loc, type,
10262 TREE_OPERAND (arg1, 0)));
10264 if (! FLOAT_TYPE_P (type))
10266 if (integer_zerop (arg1))
10267 return omit_one_operand_loc (loc, type, arg1, arg0);
10268 if (integer_onep (arg1))
10269 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10270 /* Transform x * -1 into -x. Make sure to do the negation
10271 on the original operand with conversions not stripped
10272 because we can only strip non-sign-changing conversions. */
10273 if (integer_all_onesp (arg1))
10274 return fold_convert_loc (loc, type, negate_expr (op0));
10275 /* Transform x * -C into -x * C if x is easily negatable. */
10276 if (TREE_CODE (arg1) == INTEGER_CST
10277 && tree_int_cst_sgn (arg1) == -1
10278 && negate_expr_p (arg0)
10279 && (tem = negate_expr (arg1)) != arg1
10280 && !TREE_OVERFLOW (tem))
10281 return fold_build2_loc (loc, MULT_EXPR, type,
10282 fold_convert_loc (loc, type,
10283 negate_expr (arg0)),
10284 tem);
10286 /* (a * (1 << b)) is (a << b) */
10287 if (TREE_CODE (arg1) == LSHIFT_EXPR
10288 && integer_onep (TREE_OPERAND (arg1, 0)))
10289 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10290 TREE_OPERAND (arg1, 1));
10291 if (TREE_CODE (arg0) == LSHIFT_EXPR
10292 && integer_onep (TREE_OPERAND (arg0, 0)))
10293 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10294 TREE_OPERAND (arg0, 1));
10296 /* (A + A) * C -> A * 2 * C */
10297 if (TREE_CODE (arg0) == PLUS_EXPR
10298 && TREE_CODE (arg1) == INTEGER_CST
10299 && operand_equal_p (TREE_OPERAND (arg0, 0),
10300 TREE_OPERAND (arg0, 1), 0))
10301 return fold_build2_loc (loc, MULT_EXPR, type,
10302 omit_one_operand_loc (loc, type,
10303 TREE_OPERAND (arg0, 0),
10304 TREE_OPERAND (arg0, 1)),
10305 fold_build2_loc (loc, MULT_EXPR, type,
10306 build_int_cst (type, 2) , arg1));
10308 strict_overflow_p = false;
10309 if (TREE_CODE (arg1) == INTEGER_CST
10310 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10311 &strict_overflow_p)))
10313 if (strict_overflow_p)
10314 fold_overflow_warning (("assuming signed overflow does not "
10315 "occur when simplifying "
10316 "multiplication"),
10317 WARN_STRICT_OVERFLOW_MISC);
10318 return fold_convert_loc (loc, type, tem);
10321 /* Optimize z * conj(z) for integer complex numbers. */
10322 if (TREE_CODE (arg0) == CONJ_EXPR
10323 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10324 return fold_mult_zconjz (loc, type, arg1);
10325 if (TREE_CODE (arg1) == CONJ_EXPR
10326 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10327 return fold_mult_zconjz (loc, type, arg0);
10329 else
10331 /* Maybe fold x * 0 to 0. The expressions aren't the same
10332 when x is NaN, since x * 0 is also NaN. Nor are they the
10333 same in modes with signed zeros, since multiplying a
10334 negative value by 0 gives -0, not +0. */
10335 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10336 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10337 && real_zerop (arg1))
10338 return omit_one_operand_loc (loc, type, arg1, arg0);
10339 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10340 Likewise for complex arithmetic with signed zeros. */
10341 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10342 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10343 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10344 && real_onep (arg1))
10345 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10347 /* Transform x * -1.0 into -x. */
10348 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10349 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10350 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10351 && real_minus_onep (arg1))
10352 return fold_convert_loc (loc, type, negate_expr (arg0));
10354 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10355 the result for floating point types due to rounding so it is applied
10356 only if -fassociative-math was specified. */
10357 if (flag_associative_math
10358 && TREE_CODE (arg0) == RDIV_EXPR
10359 && TREE_CODE (arg1) == REAL_CST
10360 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10362 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10363 arg1, 0);
10364 if (tem)
10365 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10366 TREE_OPERAND (arg0, 1));
10369 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10370 if (operand_equal_p (arg0, arg1, 0))
10372 tree tem = fold_strip_sign_ops (arg0);
10373 if (tem != NULL_TREE)
10375 tem = fold_convert_loc (loc, type, tem);
10376 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10380 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10381 This is not the same for NaNs or if signed zeros are
10382 involved. */
10383 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10384 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10385 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10386 && TREE_CODE (arg1) == COMPLEX_CST
10387 && real_zerop (TREE_REALPART (arg1)))
10389 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10390 if (real_onep (TREE_IMAGPART (arg1)))
10391 return
10392 fold_build2_loc (loc, COMPLEX_EXPR, type,
10393 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10394 rtype, arg0)),
10395 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10396 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10397 return
10398 fold_build2_loc (loc, COMPLEX_EXPR, type,
10399 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10400 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10401 rtype, arg0)));
10404 /* Optimize z * conj(z) for floating point complex numbers.
10405 Guarded by flag_unsafe_math_optimizations as non-finite
10406 imaginary components don't produce scalar results. */
10407 if (flag_unsafe_math_optimizations
10408 && TREE_CODE (arg0) == CONJ_EXPR
10409 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10410 return fold_mult_zconjz (loc, type, arg1);
10411 if (flag_unsafe_math_optimizations
10412 && TREE_CODE (arg1) == CONJ_EXPR
10413 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10414 return fold_mult_zconjz (loc, type, arg0);
10416 if (flag_unsafe_math_optimizations)
10418 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10419 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10421 /* Optimizations of root(...)*root(...). */
10422 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10424 tree rootfn, arg;
10425 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10426 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10428 /* Optimize sqrt(x)*sqrt(x) as x. */
10429 if (BUILTIN_SQRT_P (fcode0)
10430 && operand_equal_p (arg00, arg10, 0)
10431 && ! HONOR_SNANS (TYPE_MODE (type)))
10432 return arg00;
10434 /* Optimize root(x)*root(y) as root(x*y). */
10435 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10436 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10437 return build_call_expr_loc (loc, rootfn, 1, arg);
10440 /* Optimize expN(x)*expN(y) as expN(x+y). */
10441 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10443 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10444 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10445 CALL_EXPR_ARG (arg0, 0),
10446 CALL_EXPR_ARG (arg1, 0));
10447 return build_call_expr_loc (loc, expfn, 1, arg);
10450 /* Optimizations of pow(...)*pow(...). */
10451 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10452 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10453 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10455 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10456 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10457 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10458 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10460 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10461 if (operand_equal_p (arg01, arg11, 0))
10463 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10464 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10465 arg00, arg10);
10466 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10469 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10470 if (operand_equal_p (arg00, arg10, 0))
10472 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10473 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10474 arg01, arg11);
10475 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
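/* Illustrative sketch (editor's addition): the pow combinations above
   under -funsafe-math-optimizations:

     #include <math.h>
     double f (double x, double y, double z)
     {
       return pow (x, y) * pow (x, z);   // -> pow (x, y + z)
     }

   and likewise pow(x,y) * pow(z,y) becomes pow(x*z, y); both rely on
   reassociation that may change rounding, hence the flag.  */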
10479 /* Optimize tan(x)*cos(x) as sin(x). */
10480 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10481 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10482 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10483 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10484 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10485 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10486 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10487 CALL_EXPR_ARG (arg1, 0), 0))
10489 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10491 if (sinfn != NULL_TREE)
10492 return build_call_expr_loc (loc, sinfn, 1,
10493 CALL_EXPR_ARG (arg0, 0));
10496 /* Optimize x*pow(x,c) as pow(x,c+1). */
10497 if (fcode1 == BUILT_IN_POW
10498 || fcode1 == BUILT_IN_POWF
10499 || fcode1 == BUILT_IN_POWL)
10501 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10502 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10503 if (TREE_CODE (arg11) == REAL_CST
10504 && !TREE_OVERFLOW (arg11)
10505 && operand_equal_p (arg0, arg10, 0))
10507 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10508 REAL_VALUE_TYPE c;
10509 tree arg;
10511 c = TREE_REAL_CST (arg11);
10512 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10513 arg = build_real (type, c);
10514 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10518 /* Optimize pow(x,c)*x as pow(x,c+1). */
10519 if (fcode0 == BUILT_IN_POW
10520 || fcode0 == BUILT_IN_POWF
10521 || fcode0 == BUILT_IN_POWL)
10523 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10524 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10525 if (TREE_CODE (arg01) == REAL_CST
10526 && !TREE_OVERFLOW (arg01)
10527 && operand_equal_p (arg1, arg00, 0))
10529 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10530 REAL_VALUE_TYPE c;
10531 tree arg;
10533 c = TREE_REAL_CST (arg01);
10534 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10535 arg = build_real (type, c);
10536 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10540 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10541 if (optimize_function_for_speed_p (cfun)
10542 && operand_equal_p (arg0, arg1, 0))
10544 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10546 if (powfn)
10548 tree arg = build_real (type, dconst2);
10549 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10554 goto associate;
10556 case BIT_IOR_EXPR:
10557 bit_ior:
10558 if (integer_all_onesp (arg1))
10559 return omit_one_operand_loc (loc, type, arg1, arg0);
10560 if (integer_zerop (arg1))
10561 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10562 if (operand_equal_p (arg0, arg1, 0))
10563 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10565 /* ~X | X is -1. */
10566 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10567 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10569 t1 = fold_convert_loc (loc, type, integer_zero_node);
10570 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10571 return omit_one_operand_loc (loc, type, t1, arg1);
10574 /* X | ~X is -1. */
10575 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10576 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10578 t1 = fold_convert_loc (loc, type, integer_zero_node);
10579 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10580 return omit_one_operand_loc (loc, type, t1, arg0);
10583 /* Canonicalize (X & C1) | C2. */
10584 if (TREE_CODE (arg0) == BIT_AND_EXPR
10585 && TREE_CODE (arg1) == INTEGER_CST
10586 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10588 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10589 int width = TYPE_PRECISION (type), w;
10590 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10591 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10592 hi2 = TREE_INT_CST_HIGH (arg1);
10593 lo2 = TREE_INT_CST_LOW (arg1);
10595 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10596 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10597 return omit_one_operand_loc (loc, type, arg1,
10598 TREE_OPERAND (arg0, 0));
10600 if (width > HOST_BITS_PER_WIDE_INT)
10602 mhi = (unsigned HOST_WIDE_INT) -1
10603 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10604 mlo = -1;
10606 else
10608 mhi = 0;
10609 mlo = (unsigned HOST_WIDE_INT) -1
10610 >> (HOST_BITS_PER_WIDE_INT - width);
10613 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10614 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10615 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10616 TREE_OPERAND (arg0, 0), arg1);
10618 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10619 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10620 mode which allows further optimizations. */
10621 hi1 &= mhi;
10622 lo1 &= mlo;
10623 hi2 &= mhi;
10624 lo2 &= mlo;
10625 hi3 = hi1 & ~hi2;
10626 lo3 = lo1 & ~lo2;
10627 for (w = BITS_PER_UNIT;
10628 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10629 w <<= 1)
10631 unsigned HOST_WIDE_INT mask
10632 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10633 if (((lo1 | lo2) & mask) == mask
10634 && (lo1 & ~mask) == 0 && hi1 == 0)
10636 hi3 = 0;
10637 lo3 = mask;
10638 break;
10641 if (hi3 != hi1 || lo3 != lo1)
10642 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10643 fold_build2_loc (loc, BIT_AND_EXPR, type,
10644 TREE_OPERAND (arg0, 0),
10645 build_int_cst_wide (type,
10646 lo3, hi3)),
10647 arg1);
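/* Illustrative sketch (editor's addition): the canonicalization above
   clears mask bits that C2 forces to 1 anyway, e.g.

     unsigned f (unsigned x) { return (x & 0x3f) | 0x0f; }
     // -> (x & 0x30) | 0x0f

   unless C1 | C2 covers a natural byte or halfword mask, in which case
   the loop over w prefers that mask, since masking with it can later
   become a cheap mode conversion.  */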
10650 /* (X & Y) | Y is (X, Y). */
10651 if (TREE_CODE (arg0) == BIT_AND_EXPR
10652 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10653 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10654 /* (X & Y) | X is (Y, X). */
10655 if (TREE_CODE (arg0) == BIT_AND_EXPR
10656 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10657 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10658 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10659 /* X | (X & Y) is (Y, X). */
10660 if (TREE_CODE (arg1) == BIT_AND_EXPR
10661 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10662 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10663 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10664 /* X | (Y & X) is (Y, X). */
10665 if (TREE_CODE (arg1) == BIT_AND_EXPR
10666 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10667 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10668 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10670 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10671 if (t1 != NULL_TREE)
10672 return t1;
10674 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10676 This results in more efficient code for machines without a NAND
10677 instruction. Combine will canonicalize to the first form,
10678 which will allow use of NAND instructions provided by the
10679 backend if they exist. */
10680 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10681 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10683 return
10684 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10685 build2 (BIT_AND_EXPR, type,
10686 fold_convert_loc (loc, type,
10687 TREE_OPERAND (arg0, 0)),
10688 fold_convert_loc (loc, type,
10689 TREE_OPERAND (arg1, 0))));
10692 /* See if this can be simplified into a rotate first. If that
10693 is unsuccessful, continue in the association code. */
10694 goto bit_rotate;
10696 case BIT_XOR_EXPR:
10697 if (integer_zerop (arg1))
10698 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10699 if (integer_all_onesp (arg1))
10700 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10701 if (operand_equal_p (arg0, arg1, 0))
10702 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10704 /* ~X ^ X is -1. */
10705 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10706 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10708 t1 = fold_convert_loc (loc, type, integer_zero_node);
10709 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10710 return omit_one_operand_loc (loc, type, t1, arg1);
10713 /* X ^ ~X is -1. */
10714 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10717 t1 = fold_convert_loc (loc, type, integer_zero_node);
10718 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10719 return omit_one_operand_loc (loc, type, t1, arg0);
10722 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10723 with a constant, and the two constants have no bits in common,
10724 we should treat this as a BIT_IOR_EXPR since this may produce more
10725 simplifications. */
10726 if (TREE_CODE (arg0) == BIT_AND_EXPR
10727 && TREE_CODE (arg1) == BIT_AND_EXPR
10728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10729 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10730 && integer_zerop (const_binop (BIT_AND_EXPR,
10731 TREE_OPERAND (arg0, 1),
10732 TREE_OPERAND (arg1, 1), 0)))
10734 code = BIT_IOR_EXPR;
10735 goto bit_ior;
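	  /* For instance, (X & 0x0f) ^ (Y & 0xf0): the two masked values can
	     have no set bit in common, so the XOR is identical to
	     (X & 0x0f) | (Y & 0xf0) and is rewritten as such. */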
10738 /* (X | Y) ^ X -> Y & ~X. */
10739 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10740 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10742 tree t2 = TREE_OPERAND (arg0, 1);
10743 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10744 arg1);
10745 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10746 fold_convert_loc (loc, type, t2),
10747 fold_convert_loc (loc, type, t1));
10748 return t1;
10751 /* (Y | X) ^ X -> Y & ~X. */
10752 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10753 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10755 tree t2 = TREE_OPERAND (arg0, 0);
10756 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10757 arg1);
10758 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10759 fold_convert_loc (loc, type, t2),
10760 fold_convert_loc (loc, type, t1));
10761 return t1;
10764 /* X ^ (X | Y) -> Y & ~X. */
10765 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10766 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10768 tree t2 = TREE_OPERAND (arg1, 1);
10769 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10770 arg0);
10771 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10772 fold_convert_loc (loc, type, t2),
10773 fold_convert_loc (loc, type, t1));
10774 return t1;
10777 /* X ^ (Y | X) -> Y & ~X. */
10778 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10779 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10781 tree t2 = TREE_OPERAND (arg1, 0);
10782 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10783 arg0);
10784 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10785 fold_convert_loc (loc, type, t2),
10786 fold_convert_loc (loc, type, t1));
10787 return t1;
10790 /* Convert ~X ^ ~Y to X ^ Y. */
10791 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10792 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10793 return fold_build2_loc (loc, code, type,
10794 fold_convert_loc (loc, type,
10795 TREE_OPERAND (arg0, 0)),
10796 fold_convert_loc (loc, type,
10797 TREE_OPERAND (arg1, 0)));
10799 /* Convert ~X ^ C to X ^ ~C. */
10800 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10801 && TREE_CODE (arg1) == INTEGER_CST)
10802 return fold_build2_loc (loc, code, type,
10803 fold_convert_loc (loc, type,
10804 TREE_OPERAND (arg0, 0)),
10805 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10807 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10808 if (TREE_CODE (arg0) == BIT_AND_EXPR
10809 && integer_onep (TREE_OPERAND (arg0, 1))
10810 && integer_onep (arg1))
10811 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10812 build_int_cst (TREE_TYPE (arg0), 0));
10814 /* Fold (X & Y) ^ Y as ~X & Y. */
10815 if (TREE_CODE (arg0) == BIT_AND_EXPR
10816 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10818 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10819 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10820 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10821 fold_convert_loc (loc, type, arg1));
10823 /* Fold (X & Y) ^ X as ~Y & X. */
10824 if (TREE_CODE (arg0) == BIT_AND_EXPR
10825 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10826 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10828 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10829 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10830 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10831 fold_convert_loc (loc, type, arg1));
10833 /* Fold X ^ (X & Y) as X & ~Y. */
10834 if (TREE_CODE (arg1) == BIT_AND_EXPR
10835 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10837 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10838 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10839 fold_convert_loc (loc, type, arg0),
10840 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10842 /* Fold X ^ (Y & X) as ~Y & X. */
10843 if (TREE_CODE (arg1) == BIT_AND_EXPR
10844 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10845 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10847 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10848 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10849 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10850 fold_convert_loc (loc, type, arg0));
10853 /* See if this can be simplified into a rotate first. If that
10854 is unsuccessful, continue in the association code. */
10855 goto bit_rotate;
10857 case BIT_AND_EXPR:
10858 if (integer_all_onesp (arg1))
10859 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10860 if (integer_zerop (arg1))
10861 return omit_one_operand_loc (loc, type, arg1, arg0);
10862 if (operand_equal_p (arg0, arg1, 0))
10863 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10865 /* ~X & X is always zero. */
10866 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10867 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10868 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10870 /* X & ~X is always zero. */
10871 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10872 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10873 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10875 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10876 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10877 && TREE_CODE (arg1) == INTEGER_CST
10878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10880 tree tmp1 = fold_convert_loc (loc, type, arg1);
10881 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10882 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10883 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10884 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10885 return
10886 fold_convert_loc (loc, type,
10887 fold_build2_loc (loc, BIT_IOR_EXPR,
10888 type, tmp2, tmp3));
10891 /* (X | Y) & Y is (X, Y). */
10892 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10893 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10894 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10895 /* (X | Y) & X is (Y, X). */
10896 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10897 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10898 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10899 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10900 /* X & (X | Y) is (Y, X). */
10901 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10902 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10903 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10904 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10905 /* X & (Y | X) is (Y, X). */
10906 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10907 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10908 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10909 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10911 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10912 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10913 && integer_onep (TREE_OPERAND (arg0, 1))
10914 && integer_onep (arg1))
10916 tem = TREE_OPERAND (arg0, 0);
10917 return fold_build2_loc (loc, EQ_EXPR, type,
10918 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10919 build_int_cst (TREE_TYPE (tem), 1)),
10920 build_int_cst (TREE_TYPE (tem), 0));
10922 /* Fold ~X & 1 as (X & 1) == 0. */
10923 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10924 && integer_onep (arg1))
10926 tem = TREE_OPERAND (arg0, 0);
10927 return fold_build2_loc (loc, EQ_EXPR, type,
10928 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10929 build_int_cst (TREE_TYPE (tem), 1)),
10930 build_int_cst (TREE_TYPE (tem), 0));
10933 /* Fold (X ^ Y) & Y as ~X & Y. */
10934 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10935 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10937 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10938 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10939 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10940 fold_convert_loc (loc, type, arg1));
10942 /* Fold (X ^ Y) & X as ~Y & X. */
10943 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10944 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10945 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10947 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10948 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10949 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10950 fold_convert_loc (loc, type, arg1));
10952 /* Fold X & (X ^ Y) as X & ~Y. */
10953 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10954 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10956 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10957 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10958 fold_convert_loc (loc, type, arg0),
10959 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10961 /* Fold X & (Y ^ X) as ~Y & X. */
10962 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10963 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10964 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10966 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10967 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10968 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10969 fold_convert_loc (loc, type, arg0));
10972 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10973 if (t1 != NULL_TREE)
10974 return t1;
10975 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10976 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10977 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10979 unsigned int prec
10980 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10982 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10983 && (~TREE_INT_CST_LOW (arg1)
10984 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10985 return
10986 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10989 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10991 This results in more efficient code for machines without a NOR
10992 instruction. Combine will canonicalize to the first form,
10993 which will allow use of NOR instructions provided by the
10994 backend if they exist. */
10995 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10996 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10998 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10999 build2 (BIT_IOR_EXPR, type,
11000 fold_convert_loc (loc, type,
11001 TREE_OPERAND (arg0, 0)),
11002 fold_convert_loc (loc, type,
11003 TREE_OPERAND (arg1, 0))));
11006 /* If arg0 is derived from the address of an object or function, we may
11007 be able to fold this expression using the object or function's
11008 alignment. */
11009 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11011 unsigned HOST_WIDE_INT modulus, residue;
11012 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11014 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11015 integer_onep (arg1));
11017 /* This works because modulus is a power of 2. If this weren't the
11018 case, we'd have to replace it by its greatest power-of-2
11019 divisor: modulus & -modulus. */
11020 if (low < modulus)
11021 return build_int_cst (type, residue & low);
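	/* E.g. if ARG0 is the address of a variable known to be 8-byte
	   aligned, MODULUS is 8 and RESIDUE is 0, so ADDR & 7 folds to the
	   constant 0 (illustrative values; the actual alignment comes from
	   get_pointer_modulus_and_residue). */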
11024 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11025 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11026 if the new mask might be further optimized. */
11027 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11028 || TREE_CODE (arg0) == RSHIFT_EXPR)
11029 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11030 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11031 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11032 < TYPE_PRECISION (TREE_TYPE (arg0))
11033 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11034 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11036 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11037 unsigned HOST_WIDE_INT mask
11038 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11039 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11040 tree shift_type = TREE_TYPE (arg0);
11042 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11043 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11044 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11045 && TYPE_PRECISION (TREE_TYPE (arg0))
11046 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11048 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11049 tree arg00 = TREE_OPERAND (arg0, 0);
11050 /* See if more bits can be proven as zero because of
11051 zero extension. */
11052 if (TREE_CODE (arg00) == NOP_EXPR
11053 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11055 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11056 if (TYPE_PRECISION (inner_type)
11057 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11058 && TYPE_PRECISION (inner_type) < prec)
11060 prec = TYPE_PRECISION (inner_type);
11061 /* See if we can shorten the right shift. */
11062 if (shiftc < prec)
11063 shift_type = inner_type;
11066 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11067 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11068 zerobits <<= prec - shiftc;
11069 /* For an arithmetic shift, if the sign bit could be set, zerobits
11070 can actually contain sign bits, so no transformation is
11071 possible unless MASK masks them all away. In that case
11072 the shift needs to be converted into a logical shift. */
11073 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11074 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11076 if ((mask & zerobits) == 0)
11077 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11078 else
11079 zerobits = 0;
11083 /* ((X << 16) & 0xff00) is (X, 0). */
11084 if ((mask & zerobits) == mask)
11085 return omit_one_operand_loc (loc, type,
11086 build_int_cst (type, 0), arg0);
11088 newmask = mask | zerobits;
11089 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11091 unsigned int prec;
11093 /* Only do the transformation if NEWMASK is some integer
11094 mode's mask. */
11095 for (prec = BITS_PER_UNIT;
11096 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11097 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11098 break;
11099 if (prec < HOST_BITS_PER_WIDE_INT
11100 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11102 tree newmaskt;
11104 if (shift_type != TREE_TYPE (arg0))
11106 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11107 fold_convert_loc (loc, shift_type,
11108 TREE_OPERAND (arg0, 0)),
11109 TREE_OPERAND (arg0, 1));
11110 tem = fold_convert_loc (loc, type, tem);
11112 else
11113 tem = op0;
11114 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11115 if (!tree_int_cst_equal (newmaskt, arg1))
11116 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
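	      /* Example, assuming a 32-bit unsigned X: in (X >> 24) & 0xff
	         the shift already guarantees bits 8..31 are zero, so
	         ZEROBITS is 0xffffff00, NEWMASK widens to ~0, and the
	         now all-ones BIT_AND_EXPR folds away. */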
11121 goto associate;
11123 case RDIV_EXPR:
11124 /* Don't touch a floating-point divide by zero unless the mode
11125 of the constant can represent infinity. */
11126 if (TREE_CODE (arg1) == REAL_CST
11127 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11128 && real_zerop (arg1))
11129 return NULL_TREE;
11131 /* Optimize A / A to 1.0 if we don't care about
11132 NaNs or Infinities. Skip the transformation
11133 for non-real operands. */
11134 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11135 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11136 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11137 && operand_equal_p (arg0, arg1, 0))
11139 tree r = build_real (TREE_TYPE (arg0), dconst1);
11141 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11144 /* The complex version of the above A / A optimization. */
11145 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11146 && operand_equal_p (arg0, arg1, 0))
11148 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11149 if (! HONOR_NANS (TYPE_MODE (elem_type))
11150 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11152 tree r = build_real (elem_type, dconst1);
11153 /* omit_two_operands will call fold_convert for us. */
11154 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11158 /* (-A) / (-B) -> A / B */
11159 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11160 return fold_build2_loc (loc, RDIV_EXPR, type,
11161 TREE_OPERAND (arg0, 0),
11162 negate_expr (arg1));
11163 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11164 return fold_build2_loc (loc, RDIV_EXPR, type,
11165 negate_expr (arg0),
11166 TREE_OPERAND (arg1, 0));
11168 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11169 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11170 && real_onep (arg1))
11171 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11173 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11174 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11175 && real_minus_onep (arg1))
11176 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11177 negate_expr (arg0)));
11179 /* If ARG1 is a constant, we can convert this to a multiply by the
11180 reciprocal. This does not have the same rounding properties,
11181 so only do this if -freciprocal-math. We can actually
11182 always safely do it if ARG1 is a power of two, but it's hard to
11183 tell whether it is in a portable manner. */
11184 if (TREE_CODE (arg1) == REAL_CST)
11186 if (flag_reciprocal_math
11187 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11188 arg1, 0)))
11189 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11190 /* Find the reciprocal if optimizing and the result is exact. */
11191 if (optimize)
11193 REAL_VALUE_TYPE r;
11194 r = TREE_REAL_CST (arg1);
11195 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11197 tem = build_real (type, r);
11198 return fold_build2_loc (loc, MULT_EXPR, type,
11199 fold_convert_loc (loc, type, arg0), tem);
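	  /* E.g. x / 4.0 becomes x * 0.25 whenever optimizing, since the
	     reciprocal of a power of two is exact; x / 5.0 becomes x * 0.2
	     only under -freciprocal-math, because 0.2 is inexact in binary
	     floating point. */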
11203 /* Convert A/B/C to A/(B*C). */
11204 if (flag_reciprocal_math
11205 && TREE_CODE (arg0) == RDIV_EXPR)
11206 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11207 fold_build2_loc (loc, MULT_EXPR, type,
11208 TREE_OPERAND (arg0, 1), arg1));
11210 /* Convert A/(B/C) to (A/B)*C. */
11211 if (flag_reciprocal_math
11212 && TREE_CODE (arg1) == RDIV_EXPR)
11213 return fold_build2_loc (loc, MULT_EXPR, type,
11214 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11215 TREE_OPERAND (arg1, 0)),
11216 TREE_OPERAND (arg1, 1));
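	  /* E.g. x / y / z is rewritten as x / (y * z) and x / (y / z) as
	     (x / y) * z, each trading one division for a cheaper
	     multiplication. */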
11218 /* Convert C1/(X*C2) into (C1/C2)/X. */
11219 if (flag_reciprocal_math
11220 && TREE_CODE (arg1) == MULT_EXPR
11221 && TREE_CODE (arg0) == REAL_CST
11222 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11224 tree tem = const_binop (RDIV_EXPR, arg0,
11225 TREE_OPERAND (arg1, 1), 0);
11226 if (tem)
11227 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11228 TREE_OPERAND (arg1, 0));
11231 if (flag_unsafe_math_optimizations)
11233 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11234 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11236 /* Optimize sin(x)/cos(x) as tan(x). */
11237 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11238 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11239 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11240 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11241 CALL_EXPR_ARG (arg1, 0), 0))
11243 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11245 if (tanfn != NULL_TREE)
11246 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11249 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11250 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11251 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11252 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11253 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11254 CALL_EXPR_ARG (arg1, 0), 0))
11256 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11258 if (tanfn != NULL_TREE)
11260 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11261 CALL_EXPR_ARG (arg0, 0));
11262 return fold_build2_loc (loc, RDIV_EXPR, type,
11263 build_real (type, dconst1), tmp);
11267 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11268 NaNs or Infinities. */
11269 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11270 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11271 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11273 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11274 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11276 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11277 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11278 && operand_equal_p (arg00, arg01, 0))
11280 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11282 if (cosfn != NULL_TREE)
11283 return build_call_expr_loc (loc, cosfn, 1, arg00);
11287 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11288 NaNs or Infinities. */
11289 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11290 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11291 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11293 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11294 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11296 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11297 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11298 && operand_equal_p (arg00, arg01, 0))
11300 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11302 if (cosfn != NULL_TREE)
11304 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11305 return fold_build2_loc (loc, RDIV_EXPR, type,
11306 build_real (type, dconst1),
11307 tmp);
11312 /* Optimize pow(x,c)/x as pow(x,c-1). */
11313 if (fcode0 == BUILT_IN_POW
11314 || fcode0 == BUILT_IN_POWF
11315 || fcode0 == BUILT_IN_POWL)
11317 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11318 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11319 if (TREE_CODE (arg01) == REAL_CST
11320 && !TREE_OVERFLOW (arg01)
11321 && operand_equal_p (arg1, arg00, 0))
11323 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11324 REAL_VALUE_TYPE c;
11325 tree arg;
11327 c = TREE_REAL_CST (arg01);
11328 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11329 arg = build_real (type, c);
11330 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
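	      /* E.g. pow (x, 3.5) / x simplifies to pow (x, 2.5). */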
11334 /* Optimize a/root(b/c) into a*root(c/b). */
11335 if (BUILTIN_ROOT_P (fcode1))
11337 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11339 if (TREE_CODE (rootarg) == RDIV_EXPR)
11341 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11342 tree b = TREE_OPERAND (rootarg, 0);
11343 tree c = TREE_OPERAND (rootarg, 1);
11345 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11347 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11348 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11352 /* Optimize x/expN(y) into x*expN(-y). */
11353 if (BUILTIN_EXPONENT_P (fcode1))
11355 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11356 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11357 arg1 = build_call_expr_loc (loc,
11358 expfn, 1,
11359 fold_convert_loc (loc, type, arg));
11360 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11363 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11364 if (fcode1 == BUILT_IN_POW
11365 || fcode1 == BUILT_IN_POWF
11366 || fcode1 == BUILT_IN_POWL)
11368 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11369 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11370 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11371 tree neg11 = fold_convert_loc (loc, type,
11372 negate_expr (arg11));
11373 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11374 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11377 return NULL_TREE;
11379 case TRUNC_DIV_EXPR:
11380 case FLOOR_DIV_EXPR:
11381 /* Simplify A / (B << N) where A and B are positive and B is
11382 a power of 2, to A >> (N + log2(B)). */
11383 strict_overflow_p = false;
11384 if (TREE_CODE (arg1) == LSHIFT_EXPR
11385 && (TYPE_UNSIGNED (type)
11386 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11388 tree sval = TREE_OPERAND (arg1, 0);
11389 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11391 tree sh_cnt = TREE_OPERAND (arg1, 1);
11392 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11394 if (strict_overflow_p)
11395 fold_overflow_warning (("assuming signed overflow does not "
11396 "occur when simplifying A / (B << N)"),
11397 WARN_STRICT_OVERFLOW_MISC);
11399 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11400 sh_cnt, build_int_cst (NULL_TREE, pow2));
11401 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11402 fold_convert_loc (loc, type, arg0), sh_cnt);
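	      /* E.g. A / (4 << N) becomes A >> (N + 2) when A is known
	         non-negative, since log2 (4) == 2. */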
11406 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11407 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11408 if (INTEGRAL_TYPE_P (type)
11409 && TYPE_UNSIGNED (type)
11410 && code == FLOOR_DIV_EXPR)
11411 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11413 /* Fall thru */
11415 case ROUND_DIV_EXPR:
11416 case CEIL_DIV_EXPR:
11417 case EXACT_DIV_EXPR:
11418 if (integer_onep (arg1))
11419 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11420 if (integer_zerop (arg1))
11421 return NULL_TREE;
11422 /* X / -1 is -X. */
11423 if (!TYPE_UNSIGNED (type)
11424 && TREE_CODE (arg1) == INTEGER_CST
11425 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11426 && TREE_INT_CST_HIGH (arg1) == -1)
11427 return fold_convert_loc (loc, type, negate_expr (arg0));
11429 /* Convert -A / -B to A / B when the type is signed and overflow is
11430 undefined. */
11431 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11432 && TREE_CODE (arg0) == NEGATE_EXPR
11433 && negate_expr_p (arg1))
11435 if (INTEGRAL_TYPE_P (type))
11436 fold_overflow_warning (("assuming signed overflow does not occur "
11437 "when distributing negation across "
11438 "division"),
11439 WARN_STRICT_OVERFLOW_MISC);
11440 return fold_build2_loc (loc, code, type,
11441 fold_convert_loc (loc, type,
11442 TREE_OPERAND (arg0, 0)),
11443 fold_convert_loc (loc, type,
11444 negate_expr (arg1)));
11446 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11447 && TREE_CODE (arg1) == NEGATE_EXPR
11448 && negate_expr_p (arg0))
11450 if (INTEGRAL_TYPE_P (type))
11451 fold_overflow_warning (("assuming signed overflow does not occur "
11452 "when distributing negation across "
11453 "division"),
11454 WARN_STRICT_OVERFLOW_MISC);
11455 return fold_build2_loc (loc, code, type,
11456 fold_convert_loc (loc, type,
11457 negate_expr (arg0)),
11458 fold_convert_loc (loc, type,
11459 TREE_OPERAND (arg1, 0)));
11462 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11463 operation, EXACT_DIV_EXPR.
11465 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11466 At one time others generated faster code, but it's not clear whether
11467 they still do after the last round of changes to the DIV code in expmed.c. */
11468 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11469 && multiple_of_p (type, arg0, arg1))
11470 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11472 strict_overflow_p = false;
11473 if (TREE_CODE (arg1) == INTEGER_CST
11474 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11475 &strict_overflow_p)))
11477 if (strict_overflow_p)
11478 fold_overflow_warning (("assuming signed overflow does not occur "
11479 "when simplifying division"),
11480 WARN_STRICT_OVERFLOW_MISC);
11481 return fold_convert_loc (loc, type, tem);
11484 return NULL_TREE;
11486 case CEIL_MOD_EXPR:
11487 case FLOOR_MOD_EXPR:
11488 case ROUND_MOD_EXPR:
11489 case TRUNC_MOD_EXPR:
11490 /* X % 1 is always zero, but be sure to preserve any side
11491 effects in X. */
11492 if (integer_onep (arg1))
11493 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11495 /* For X % 0, return X % 0 unchanged so that we can get the
11496 proper warnings and errors. */
11497 if (integer_zerop (arg1))
11498 return NULL_TREE;
11500 /* 0 % X is always zero, but be sure to preserve any side
11501 effects in X. Place this after checking for X == 0. */
11502 if (integer_zerop (arg0))
11503 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11505 /* X % -1 is zero. */
11506 if (!TYPE_UNSIGNED (type)
11507 && TREE_CODE (arg1) == INTEGER_CST
11508 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11509 && TREE_INT_CST_HIGH (arg1) == -1)
11510 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11512 /* X % -C is the same as X % C. */
11513 if (code == TRUNC_MOD_EXPR
11514 && !TYPE_UNSIGNED (type)
11515 && TREE_CODE (arg1) == INTEGER_CST
11516 && !TREE_OVERFLOW (arg1)
11517 && TREE_INT_CST_HIGH (arg1) < 0
11518 && !TYPE_OVERFLOW_TRAPS (type)
11519 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11520 && !sign_bit_p (arg1, arg1))
11521 return fold_build2_loc (loc, code, type,
11522 fold_convert_loc (loc, type, arg0),
11523 fold_convert_loc (loc, type,
11524 negate_expr (arg1)));
11526 /* X % -Y is the same as X % Y. */
11527 if (code == TRUNC_MOD_EXPR
11528 && !TYPE_UNSIGNED (type)
11529 && TREE_CODE (arg1) == NEGATE_EXPR
11530 && !TYPE_OVERFLOW_TRAPS (type))
11531 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11532 fold_convert_loc (loc, type,
11533 TREE_OPERAND (arg1, 0)));
11535 strict_overflow_p = false;
11536 if (TREE_CODE (arg1) == INTEGER_CST
11537 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11538 &strict_overflow_p)))
11540 if (strict_overflow_p)
11541 fold_overflow_warning (("assuming signed overflow does not occur "
11542 "when simplifying modulus"),
11543 WARN_STRICT_OVERFLOW_MISC);
11544 return fold_convert_loc (loc, type, tem);
11547 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11548 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11549 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11550 && (TYPE_UNSIGNED (type)
11551 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11553 tree c = arg1;
11554 /* Also optimize A % (C << N) where C is a power of 2,
11555 to A & ((C << N) - 1). */
11556 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11557 c = TREE_OPERAND (arg1, 0);
11559 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11561 tree mask
11562 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11563 build_int_cst (TREE_TYPE (arg1), 1));
11564 if (strict_overflow_p)
11565 fold_overflow_warning (("assuming signed overflow does not "
11566 "occur when simplifying "
11567 "X % (power of two)"),
11568 WARN_STRICT_OVERFLOW_MISC);
11569 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11570 fold_convert_loc (loc, type, arg0),
11571 fold_convert_loc (loc, type, mask));
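	    /* E.g. unsigned X % 8 becomes X & 7, and A % (2 << N) becomes
	       A & ((2 << N) - 1), replacing the modulus with a cheap mask. */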
11575 return NULL_TREE;
11577 case LROTATE_EXPR:
11578 case RROTATE_EXPR:
11579 if (integer_all_onesp (arg0))
11580 return omit_one_operand_loc (loc, type, arg0, arg1);
11581 goto shift;
11583 case RSHIFT_EXPR:
11584 /* Optimize -1 >> x for arithmetic right shifts. */
11585 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11586 && tree_expr_nonnegative_p (arg1))
11587 return omit_one_operand_loc (loc, type, arg0, arg1);
11588 /* ... fall through ... */
11590 case LSHIFT_EXPR:
11591 shift:
11592 if (integer_zerop (arg1))
11593 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11594 if (integer_zerop (arg0))
11595 return omit_one_operand_loc (loc, type, arg0, arg1);
11597 /* Since a negative shift count is not well-defined,
11598 don't try to compute it in the compiler. */
11599 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11600 return NULL_TREE;
11602 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11603 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11604 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11605 && host_integerp (TREE_OPERAND (arg0, 1), false)
11606 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11608 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11609 + TREE_INT_CST_LOW (arg1));
11611 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11612 being well defined. */
11613 if (low >= TYPE_PRECISION (type))
11615 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11616 low = low % TYPE_PRECISION (type);
11617 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11618 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11619 TREE_OPERAND (arg0, 0));
11620 else
11621 low = TYPE_PRECISION (type) - 1;
11624 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11625 build_int_cst (type, low));
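	  /* E.g. (x >> 3) >> 5 combines into x >> 8; once the combined count
	     reaches the precision, e.g. (x << 20) << 20 for 32-bit unsigned x,
	     the result is simply 0. */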
11628 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11629 into x & ((unsigned)-1 >> c) for unsigned types. */
11630 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11631 || (TYPE_UNSIGNED (type)
11632 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11633 && host_integerp (arg1, false)
11634 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11635 && host_integerp (TREE_OPERAND (arg0, 1), false)
11636 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11638 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11639 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11640 tree lshift;
11641 tree arg00;
11643 if (low0 == low1)
11645 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11647 lshift = build_int_cst (type, -1);
11648 lshift = int_const_binop (code, lshift, arg1, 0);
11650 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11654 /* Rewrite an LROTATE_EXPR by a constant into an
11655 RROTATE_EXPR by a new constant. */
11656 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11658 tree tem = build_int_cst (TREE_TYPE (arg1),
11659 TYPE_PRECISION (type));
11660 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11661 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
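	  /* E.g. a 32-bit rotate left by 8 becomes a rotate right by 24, so
	     only one canonical rotate direction reaches later passes. */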
11664 /* If we have a rotate of a bit operation with the rotate count and
11665 the second operand of the bit operation both constant,
11666 permute the two operations. */
11667 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11668 && (TREE_CODE (arg0) == BIT_AND_EXPR
11669 || TREE_CODE (arg0) == BIT_IOR_EXPR
11670 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11671 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11672 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11673 fold_build2_loc (loc, code, type,
11674 TREE_OPERAND (arg0, 0), arg1),
11675 fold_build2_loc (loc, code, type,
11676 TREE_OPERAND (arg0, 1), arg1));
11678 /* Two consecutive rotates adding up to the precision of the
11679 type can be ignored. */
11680 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11681 && TREE_CODE (arg0) == RROTATE_EXPR
11682 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11683 && TREE_INT_CST_HIGH (arg1) == 0
11684 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11685 && ((TREE_INT_CST_LOW (arg1)
11686 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11687 == (unsigned int) TYPE_PRECISION (type)))
11688 return TREE_OPERAND (arg0, 0);
11690 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11691 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11692 if the latter can be further optimized. */
11693 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11694 && TREE_CODE (arg0) == BIT_AND_EXPR
11695 && TREE_CODE (arg1) == INTEGER_CST
11696 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11698 tree mask = fold_build2_loc (loc, code, type,
11699 fold_convert_loc (loc, type,
11700 TREE_OPERAND (arg0, 1)),
11701 arg1);
11702 tree shift = fold_build2_loc (loc, code, type,
11703 fold_convert_loc (loc, type,
11704 TREE_OPERAND (arg0, 0)),
11705 arg1);
11706 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11707 if (tem)
11708 return tem;
11711 return NULL_TREE;
11713 case MIN_EXPR:
11714 if (operand_equal_p (arg0, arg1, 0))
11715 return omit_one_operand_loc (loc, type, arg0, arg1);
11716 if (INTEGRAL_TYPE_P (type)
11717 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11718 return omit_one_operand_loc (loc, type, arg1, arg0);
11719 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11720 if (tem)
11721 return tem;
11722 goto associate;
11724 case MAX_EXPR:
11725 if (operand_equal_p (arg0, arg1, 0))
11726 return omit_one_operand_loc (loc, type, arg0, arg1);
11727 if (INTEGRAL_TYPE_P (type)
11728 && TYPE_MAX_VALUE (type)
11729 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11730 return omit_one_operand_loc (loc, type, arg1, arg0);
11731 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11732 if (tem)
11733 return tem;
11734 goto associate;
11736 case TRUTH_ANDIF_EXPR:
11737 /* Note that the operands of this must be ints
11738 and their values must be 0 or 1.
11739 ("true" is a fixed value perhaps depending on the language.) */
11740 /* If first arg is constant zero, return it. */
11741 if (integer_zerop (arg0))
11742 return fold_convert_loc (loc, type, arg0);
11743 case TRUTH_AND_EXPR:
11744 /* If either arg is constant true, drop it. */
11745 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11746 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11747 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11748 /* Preserve sequence points. */
11749 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11750 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11751 /* If second arg is constant zero, result is zero, but first arg
11752 must be evaluated. */
11753 if (integer_zerop (arg1))
11754 return omit_one_operand_loc (loc, type, arg1, arg0);
11755 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11756 case will be handled here. */
11757 if (integer_zerop (arg0))
11758 return omit_one_operand_loc (loc, type, arg0, arg1);
11760 /* !X && X is always false. */
11761 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11762 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11763 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11764 /* X && !X is always false. */
11765 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11766 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11767 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11769 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11770 means A >= Y && A != MAX, but in this case we know that
11771 A < X <= MAX. */
11773 if (!TREE_SIDE_EFFECTS (arg0)
11774 && !TREE_SIDE_EFFECTS (arg1))
11776 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11777 if (tem && !operand_equal_p (tem, arg0, 0))
11778 return fold_build2_loc (loc, code, type, tem, arg1);
11780 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11781 if (tem && !operand_equal_p (tem, arg1, 0))
11782 return fold_build2_loc (loc, code, type, arg0, tem);
11785 truth_andor:
11786 /* We only do these simplifications if we are optimizing. */
11787 if (!optimize)
11788 return NULL_TREE;
11790 /* Check for things like (A || B) && (A || C). We can convert this
11791 to A || (B && C). Note that either operator can be any of the four
11792 truth and/or operations and the transformation will still be
11793 valid. Also note that we only care about order for the
11794 ANDIF and ORIF operators. If B contains side effects, this
11795 might change the truth-value of A. */
11796 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11797 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11798 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11799 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11800 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11801 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11803 tree a00 = TREE_OPERAND (arg0, 0);
11804 tree a01 = TREE_OPERAND (arg0, 1);
11805 tree a10 = TREE_OPERAND (arg1, 0);
11806 tree a11 = TREE_OPERAND (arg1, 1);
11807 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11808 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11809 && (code == TRUTH_AND_EXPR
11810 || code == TRUTH_OR_EXPR));
11812 if (operand_equal_p (a00, a10, 0))
11813 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11814 fold_build2_loc (loc, code, type, a01, a11));
11815 else if (commutative && operand_equal_p (a00, a11, 0))
11816 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11817 fold_build2_loc (loc, code, type, a01, a10));
11818 else if (commutative && operand_equal_p (a01, a10, 0))
11819 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11820 fold_build2_loc (loc, code, type, a00, a11));
11822 /* This case is tricky because we must either have commutative
11823 operators or else A10 must not have side-effects. */
11825 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11826 && operand_equal_p (a01, a11, 0))
11827 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11828 fold_build2_loc (loc, code, type, a00, a10),
11829 a01);
11832 /* See if we can build a range comparison. */
11833 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11834 return tem;
11836 /* Check for the possibility of merging component references. If our
11837 lhs is another similar operation, try to merge its rhs with our
11838 rhs. Then try to merge our lhs and rhs. */
11839 if (TREE_CODE (arg0) == code
11840 && 0 != (tem = fold_truthop (loc, code, type,
11841 TREE_OPERAND (arg0, 1), arg1)))
11842 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
11844 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
11845 return tem;
11847 return NULL_TREE;
11849 case TRUTH_ORIF_EXPR:
11850 /* Note that the operands of this must be ints
11851 and their values must be 0 or true.
11852 ("true" is a fixed value perhaps depending on the language.) */
11853 /* If first arg is constant true, return it. */
11854 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11855 return fold_convert_loc (loc, type, arg0);
11856 case TRUTH_OR_EXPR:
11857 /* If either arg is constant zero, drop it. */
11858 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11859 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11860 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11861 /* Preserve sequence points. */
11862 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11863 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11864 /* If second arg is constant true, result is true, but we must
11865 evaluate first arg. */
11866 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11867 return omit_one_operand_loc (loc, type, arg1, arg0);
11868 /* Likewise for first arg, but note this only occurs here for
11869 TRUTH_OR_EXPR. */
11870 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11871 return omit_one_operand_loc (loc, type, arg0, arg1);
11873 /* !X || X is always true. */
11874 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11875 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11876 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11877 /* X || !X is always true. */
11878 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11879 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11880 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11882 goto truth_andor;
11884 case TRUTH_XOR_EXPR:
11885 /* If the second arg is constant zero, drop it. */
11886 if (integer_zerop (arg1))
11887 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11888 /* If the second arg is constant true, this is a logical inversion. */
11889 if (integer_onep (arg1))
11891 /* Only call invert_truthvalue if operand is a truth value. */
11892 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11893 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11894 else
11895 tem = invert_truthvalue_loc (loc, arg0);
11896 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11898 /* Identical arguments cancel to zero. */
11899 if (operand_equal_p (arg0, arg1, 0))
11900 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11902 /* !X ^ X is always true. */
11903 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11904 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11905 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11907 /* X ^ !X is always true. */
11908 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11909 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11910 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11912 return NULL_TREE;
11914 case EQ_EXPR:
11915 case NE_EXPR:
11916 tem = fold_comparison (loc, code, type, op0, op1);
11917 if (tem != NULL_TREE)
11918 return tem;
11920 /* bool_var != 0 becomes bool_var. */
11921 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11922 && code == NE_EXPR)
11923 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11925 /* bool_var == 1 becomes bool_var. */
11926 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11927 && code == EQ_EXPR)
11928 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11930 /* bool_var != 1 becomes !bool_var. */
11931 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11932 && code == NE_EXPR)
11933 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
11934 fold_convert_loc (loc, type, arg0));
11936 /* bool_var == 0 becomes !bool_var. */
11937 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11938 && code == EQ_EXPR)
11939 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
11940 fold_convert_loc (loc, type, arg0));
11942 /* !exp != 0 becomes !exp. */
11943 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11944 && code == NE_EXPR)
11945 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11947 /* If this is an equality comparison of the address of two non-weak,
11948 unaliased symbols neither of which are extern (since we do not
11949 have access to attributes for externs), then we know the result. */
11950 if (TREE_CODE (arg0) == ADDR_EXPR
11951 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11952 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11953 && ! lookup_attribute ("alias",
11954 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11955 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11956 && TREE_CODE (arg1) == ADDR_EXPR
11957 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11958 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11959 && ! lookup_attribute ("alias",
11960 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11961 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11963 /* We know that we're looking at the address of two
11964 non-weak, unaliased, static _DECL nodes.
11966 It is both wasteful and incorrect to call operand_equal_p
11967 to compare the two ADDR_EXPR nodes. It is wasteful in that
11968 all we need to do is test pointer equality for the arguments
11969 to the two ADDR_EXPR nodes. It is incorrect to use
11970 operand_equal_p as that function is NOT equivalent to a
11971 C equality test. It can in fact return false for two
11972 objects which would test as equal using the C equality
11973 operator. */
11974 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11975 return constant_boolean_node (equal
11976 ? code == EQ_EXPR : code != EQ_EXPR,
11977 type);
11980 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11981 a MINUS_EXPR of a constant, we can convert it into a comparison with
11982 a revised constant as long as no overflow occurs. */
11983 if (TREE_CODE (arg1) == INTEGER_CST
11984 && (TREE_CODE (arg0) == PLUS_EXPR
11985 || TREE_CODE (arg0) == MINUS_EXPR)
11986 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11987 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11988 ? MINUS_EXPR : PLUS_EXPR,
11989 fold_convert_loc (loc, TREE_TYPE (arg0),
11990 arg1),
11991 TREE_OPERAND (arg0, 1), 0))
11992 && !TREE_OVERFLOW (tem))
11993 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
11995 /* Similarly for a NEGATE_EXPR. */
11996 if (TREE_CODE (arg0) == NEGATE_EXPR
11997 && TREE_CODE (arg1) == INTEGER_CST
11998 && 0 != (tem = negate_expr (arg1))
11999 && TREE_CODE (tem) == INTEGER_CST
12000 && !TREE_OVERFLOW (tem))
12001 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12003 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12004 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12005 && TREE_CODE (arg1) == INTEGER_CST
12006 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12007 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12008 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12009 fold_convert_loc (loc,
12010 TREE_TYPE (arg0),
12011 arg1),
12012 TREE_OPERAND (arg0, 1)));
12014 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12015 if ((TREE_CODE (arg0) == PLUS_EXPR
12016 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12017 || TREE_CODE (arg0) == MINUS_EXPR)
12018 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12019 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12020 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12022 tree val = TREE_OPERAND (arg0, 1);
12023 return omit_two_operands_loc (loc, type,
12024 fold_build2_loc (loc, code, type,
12025 val,
12026 build_int_cst (TREE_TYPE (val),
12027 0)),
12028 TREE_OPERAND (arg0, 0), arg1);
12031 /* Transform comparisons of the form C - X CMP X if C % 2 == 1, where C - X == X would need C == 2*X, which no odd C can satisfy. */
12032 if (TREE_CODE (arg0) == MINUS_EXPR
12033 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12034 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12035 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12037 return omit_two_operands_loc (loc, type,
12038 code == NE_EXPR
12039 ? boolean_true_node : boolean_false_node,
12040 TREE_OPERAND (arg0, 1), arg1);
12043 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12044 for !=. Don't do this for ordered comparisons due to overflow. */
12045 if (TREE_CODE (arg0) == MINUS_EXPR
12046 && integer_zerop (arg1))
12047 return fold_build2_loc (loc, code, type,
12048 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12050 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12051 if (TREE_CODE (arg0) == ABS_EXPR
12052 && (integer_zerop (arg1) || real_zerop (arg1)))
12053 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12055 /* If this is an EQ or NE comparison with zero and ARG0 is
12056 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12057 two operations, but the latter can be done in one less insn
12058 on machines that have only two-operand insns or on which a
12059 constant cannot be the first operand. */
12060 if (TREE_CODE (arg0) == BIT_AND_EXPR
12061 && integer_zerop (arg1))
12063 tree arg00 = TREE_OPERAND (arg0, 0);
12064 tree arg01 = TREE_OPERAND (arg0, 1);
12065 if (TREE_CODE (arg00) == LSHIFT_EXPR
12066 && integer_onep (TREE_OPERAND (arg00, 0)))
12068 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12069 arg01, TREE_OPERAND (arg00, 1));
12070 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12071 build_int_cst (TREE_TYPE (arg0), 1));
12072 return fold_build2_loc (loc, code, type,
12073 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12074 arg1);
12076 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12077 && integer_onep (TREE_OPERAND (arg01, 0)))
12079 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12080 arg00, TREE_OPERAND (arg01, 1));
12081 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12082 build_int_cst (TREE_TYPE (arg0), 1));
12083 return fold_build2_loc (loc, code, type,
12084 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12085 arg1);
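	  /* Both shapes are single-bit tests: e.g. ((1 << n) & bar) == 0
	     becomes ((bar >> n) & 1) == 0, moving the variable shift onto
	     the non-constant operand. */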
12089 /* If this is an NE or EQ comparison of zero against the result of a
12090 signed MOD operation whose second operand is a power of 2, make
12091 the MOD operation unsigned since it is simpler and equivalent. */
12092 if (integer_zerop (arg1)
12093 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12094 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12095 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12096 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12097 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12098 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12100 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12101 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12102 fold_convert_loc (loc, newtype,
12103 TREE_OPERAND (arg0, 0)),
12104 fold_convert_loc (loc, newtype,
12105 TREE_OPERAND (arg0, 1)));
12107 return fold_build2_loc (loc, code, type, newmod,
12108 fold_convert_loc (loc, newtype, arg1));
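	  /* E.g. for 32-bit int X, X % 4 == 0 holds exactly when
	     (unsigned) X % 4U == 0, because 2^32 is itself divisible by 4;
	     the unsigned form needs no sign fix-ups. */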
12111 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12112 C1 is a valid shift constant, and C2 is a power of two, i.e.
12113 a single bit. */
12114 if (TREE_CODE (arg0) == BIT_AND_EXPR
12115 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12116 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12117 == INTEGER_CST
12118 && integer_pow2p (TREE_OPERAND (arg0, 1))
12119 && integer_zerop (arg1))
12121 tree itype = TREE_TYPE (arg0);
12122 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12123 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12125 /* Check for a valid shift count. */
12126 if (TREE_INT_CST_HIGH (arg001) == 0
12127 && TREE_INT_CST_LOW (arg001) < prec)
12129 tree arg01 = TREE_OPERAND (arg0, 1);
12130 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12131 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12132 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12133 can be rewritten as (X & (C2 << C1)) != 0. */
12134 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12136 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12137 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12138 return fold_build2_loc (loc, code, type, tem, arg1);
12140 /* Otherwise, for signed (arithmetic) shifts,
12141 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12142 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12143 else if (!TYPE_UNSIGNED (itype))
12144 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12145 arg000, build_int_cst (itype, 0));
12146 /* Otherwise, for unsigned (logical) shifts,
12147 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12148 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12149 else
12150 return omit_one_operand_loc (loc, type,
12151 code == EQ_EXPR ? integer_one_node
12152 : integer_zero_node,
12153 arg000);
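	      /* E.g. ((X >> 2) & 4) != 0 becomes (X & 16) != 0, since
	         4 << 2 == 16 does not overflow; whereas
	         ((X >> 2) & 0x40000000) != 0 on a signed 32-bit X picks up
	         only copies of the sign bit and becomes X < 0. */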
12157 /* If this is an NE comparison of zero with an AND of one, remove the
12158 comparison since the AND will give the correct value. */
12159 if (code == NE_EXPR
12160 && integer_zerop (arg1)
12161 && TREE_CODE (arg0) == BIT_AND_EXPR
12162 && integer_onep (TREE_OPERAND (arg0, 1)))
12163 return fold_convert_loc (loc, type, arg0);
12165 /* If we have (A & C) == C where C is a power of 2, convert this into
12166 (A & C) != 0. Similarly for NE_EXPR. */
12167 if (TREE_CODE (arg0) == BIT_AND_EXPR
12168 && integer_pow2p (TREE_OPERAND (arg0, 1))
12169 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12170 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12171 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12172 integer_zero_node));
12174 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12175 bit, then fold the expression into A < 0 or A >= 0. */
12176 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12177 if (tem)
12178 return tem;
12180 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12181 Similarly for NE_EXPR. */
12182 if (TREE_CODE (arg0) == BIT_AND_EXPR
12183 && TREE_CODE (arg1) == INTEGER_CST
12184 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12186 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12187 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12188 TREE_OPERAND (arg0, 1));
12189 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12190 arg1, notc);
12191 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12192 if (integer_nonzerop (dandnotc))
12193 return omit_one_operand_loc (loc, type, rslt, arg0);
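   /* For example, (a & 6) == 1 is always false, because 1 has a bit
      set outside the mask 6.  */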
12196 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12197 Similarly for NE_EXPR. */
12198 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12199 && TREE_CODE (arg1) == INTEGER_CST
12200 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12202 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12203 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12204 TREE_OPERAND (arg0, 1), notd);
12205 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12206 if (integer_nonzerop (candnotd))
12207 return omit_one_operand_loc (loc, type, rslt, arg0);
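   /* For example, (a | 4) == 3 is always false, because the OR always
      sets the 4 bit while 3 lacks it.  */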
12210 /* If this is a comparison of a field, we may be able to simplify it. */
12211 if ((TREE_CODE (arg0) == COMPONENT_REF
12212 || TREE_CODE (arg0) == BIT_FIELD_REF)
12213 /* Handle the constant case even without -O
12214 to make sure the warnings are given. */
12215 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12217 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12218 if (t1)
12219 return t1;
12222 /* Optimize comparisons of strlen vs zero to a compare of the
12223 first character of the string vs zero. To wit,
12224 strlen(ptr) == 0 => *ptr == 0
12225 strlen(ptr) != 0 => *ptr != 0
12226 Other cases should reduce to one of these two (or a constant)
12227 due to the return value of strlen being unsigned. */
12228 if (TREE_CODE (arg0) == CALL_EXPR
12229 && integer_zerop (arg1))
12231 tree fndecl = get_callee_fndecl (arg0);
12233 if (fndecl
12234 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12235 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12236 && call_expr_nargs (arg0) == 1
12237 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12239 tree iref = build_fold_indirect_ref_loc (loc,
12240 CALL_EXPR_ARG (arg0, 0));
12241 return fold_build2_loc (loc, code, type, iref,
12242 build_int_cst (TREE_TYPE (iref), 0));
12246 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12247 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12248 if (TREE_CODE (arg0) == RSHIFT_EXPR
12249 && integer_zerop (arg1)
12250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12252 tree arg00 = TREE_OPERAND (arg0, 0);
12253 tree arg01 = TREE_OPERAND (arg0, 1);
12254 tree itype = TREE_TYPE (arg00);
12255 if (TREE_INT_CST_HIGH (arg01) == 0
12256 && TREE_INT_CST_LOW (arg01)
12257 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12259 if (TYPE_UNSIGNED (itype))
12261 itype = signed_type_for (itype);
12262 arg00 = fold_convert_loc (loc, itype, arg00);
12264 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12265 type, arg00, build_int_cst (itype, 0));
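   /* For example, assuming 32-bit unsigned int x:
        (x >> 31) != 0   becomes   (int) x < 0
      since the shift isolates exactly the sign bit.  */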
12269 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12270 if (integer_zerop (arg1)
12271 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12272 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12273 TREE_OPERAND (arg0, 1));
12275 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12276 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12277 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12278 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12279 build_int_cst (TREE_TYPE (arg1), 0));
12280 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12281 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12282 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12283 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12284 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12285 build_int_cst (TREE_TYPE (arg1), 0));
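   /* For example, (x ^ y) == 0 becomes x == y, and (x ^ y) == y
      becomes x == 0.  */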
12287 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12288 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12289 && TREE_CODE (arg1) == INTEGER_CST
12290 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12291 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12292 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12293 TREE_OPERAND (arg0, 1), arg1));
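   /* For example, (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 == 6.  */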
12295 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12296 (X & C) == 0 when C is a single bit. */
12297 if (TREE_CODE (arg0) == BIT_AND_EXPR
12298 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12299 && integer_zerop (arg1)
12300 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12302 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12303 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12304 TREE_OPERAND (arg0, 1));
12305 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12306 type, tem, arg1);
12309 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12310 constant C is a power of two, i.e. a single bit. */
12311 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12312 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12313 && integer_zerop (arg1)
12314 && integer_pow2p (TREE_OPERAND (arg0, 1))
12315 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12316 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12318 tree arg00 = TREE_OPERAND (arg0, 0);
12319 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12320 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12323 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12324 when C is a power of two, i.e. a single bit. */
12325 if (TREE_CODE (arg0) == BIT_AND_EXPR
12326 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12327 && integer_zerop (arg1)
12328 && integer_pow2p (TREE_OPERAND (arg0, 1))
12329 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12330 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12332 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12333 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12334 arg000, TREE_OPERAND (arg0, 1));
12335 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12336 tem, build_int_cst (TREE_TYPE (tem), 0));
12339 if (integer_zerop (arg1)
12340 && tree_expr_nonzero_p (arg0))
12342 tree res = constant_boolean_node (code == NE_EXPR, type);
12343 return omit_one_operand_loc (loc, type, res, arg0);
12346 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12347 if (TREE_CODE (arg0) == NEGATE_EXPR
12348 && TREE_CODE (arg1) == NEGATE_EXPR)
12349 return fold_build2_loc (loc, code, type,
12350 TREE_OPERAND (arg0, 0),
12351 TREE_OPERAND (arg1, 0));
12353 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12354 if (TREE_CODE (arg0) == BIT_AND_EXPR
12355 && TREE_CODE (arg1) == BIT_AND_EXPR)
12357 tree arg00 = TREE_OPERAND (arg0, 0);
12358 tree arg01 = TREE_OPERAND (arg0, 1);
12359 tree arg10 = TREE_OPERAND (arg1, 0);
12360 tree arg11 = TREE_OPERAND (arg1, 1);
12361 tree itype = TREE_TYPE (arg0);
12363 if (operand_equal_p (arg01, arg11, 0))
12364 return fold_build2_loc (loc, code, type,
12365 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12366 fold_build2_loc (loc,
12367 BIT_XOR_EXPR, itype,
12368 arg00, arg10),
12369 arg01),
12370 build_int_cst (itype, 0));
12372 if (operand_equal_p (arg01, arg10, 0))
12373 return fold_build2_loc (loc, code, type,
12374 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12375 fold_build2_loc (loc,
12376 BIT_XOR_EXPR, itype,
12377 arg00, arg11),
12378 arg01),
12379 build_int_cst (itype, 0));
12381 if (operand_equal_p (arg00, arg11, 0))
12382 return fold_build2_loc (loc, code, type,
12383 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12384 fold_build2_loc (loc,
12385 BIT_XOR_EXPR, itype,
12386 arg01, arg10),
12387 arg00),
12388 build_int_cst (itype, 0));
12390 if (operand_equal_p (arg00, arg10, 0))
12391 return fold_build2_loc (loc, code, type,
12392 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12393 fold_build2_loc (loc,
12394 BIT_XOR_EXPR, itype,
12395 arg01, arg11),
12396 arg00),
12397 build_int_cst (itype, 0));
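   /* For example, (x & 7) == (y & 7) becomes ((x ^ y) & 7) == 0: the
      masked values are equal exactly when x and y agree in every bit
      selected by the mask.  */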
12400 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12401 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12403 tree arg00 = TREE_OPERAND (arg0, 0);
12404 tree arg01 = TREE_OPERAND (arg0, 1);
12405 tree arg10 = TREE_OPERAND (arg1, 0);
12406 tree arg11 = TREE_OPERAND (arg1, 1);
12407 tree itype = TREE_TYPE (arg0);
12409 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12410 operand_equal_p guarantees no side-effects so we don't need
12411 to use omit_one_operand on Z. */
12412 if (operand_equal_p (arg01, arg11, 0))
12413 return fold_build2_loc (loc, code, type, arg00, arg10);
12414 if (operand_equal_p (arg01, arg10, 0))
12415 return fold_build2_loc (loc, code, type, arg00, arg11);
12416 if (operand_equal_p (arg00, arg11, 0))
12417 return fold_build2_loc (loc, code, type, arg01, arg10);
12418 if (operand_equal_p (arg00, arg10, 0))
12419 return fold_build2_loc (loc, code, type, arg01, arg11);
12421 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12422 if (TREE_CODE (arg01) == INTEGER_CST
12423 && TREE_CODE (arg11) == INTEGER_CST)
12424 return fold_build2_loc (loc, code, type,
12425 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12426 fold_build2_loc (loc,
12427 BIT_XOR_EXPR, itype,
12428 arg01, arg11)),
12429 arg10);
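   /* For example, (x ^ 1) == (y ^ 4) becomes (x ^ 5) == y, folding
      the two constants into one.  */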
12432 /* Attempt to simplify equality/inequality comparisons of complex
12433 values. Only lower the comparison if the result is known or
12434 can be simplified to a single scalar comparison. */
12435 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12436 || TREE_CODE (arg0) == COMPLEX_CST)
12437 && (TREE_CODE (arg1) == COMPLEX_EXPR
12438 || TREE_CODE (arg1) == COMPLEX_CST))
12440 tree real0, imag0, real1, imag1;
12441 tree rcond, icond;
12443 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12445 real0 = TREE_OPERAND (arg0, 0);
12446 imag0 = TREE_OPERAND (arg0, 1);
12448 else
12450 real0 = TREE_REALPART (arg0);
12451 imag0 = TREE_IMAGPART (arg0);
12454 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12456 real1 = TREE_OPERAND (arg1, 0);
12457 imag1 = TREE_OPERAND (arg1, 1);
12459 else
12461 real1 = TREE_REALPART (arg1);
12462 imag1 = TREE_IMAGPART (arg1);
12465 rcond = fold_binary_loc (loc, code, type, real0, real1);
12466 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12468 if (integer_zerop (rcond))
12470 if (code == EQ_EXPR)
12471 return omit_two_operands_loc (loc, type, boolean_false_node,
12472 imag0, imag1);
12473 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12475 else
12477 if (code == NE_EXPR)
12478 return omit_two_operands_loc (loc, type, boolean_true_node,
12479 imag0, imag1);
12480 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12484 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12485 if (icond && TREE_CODE (icond) == INTEGER_CST)
12487 if (integer_zerop (icond))
12489 if (code == EQ_EXPR)
12490 return omit_two_operands_loc (loc, type, boolean_false_node,
12491 real0, real1);
12492 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12494 else
12496 if (code == NE_EXPR)
12497 return omit_two_operands_loc (loc, type, boolean_true_node,
12498 real0, real1);
12499 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12504 return NULL_TREE;
12506 case LT_EXPR:
12507 case GT_EXPR:
12508 case LE_EXPR:
12509 case GE_EXPR:
12510 tem = fold_comparison (loc, code, type, op0, op1);
12511 if (tem != NULL_TREE)
12512 return tem;
12514 /* Transform comparisons of the form X +- C CMP X. */
12515 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12516 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12517 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12518 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12519 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12520 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12522 tree arg01 = TREE_OPERAND (arg0, 1);
12523 enum tree_code code0 = TREE_CODE (arg0);
12524 int is_positive;
12526 if (TREE_CODE (arg01) == REAL_CST)
12527 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12528 else
12529 is_positive = tree_int_cst_sgn (arg01);
12531 /* (X - c) > X becomes false. */
12532 if (code == GT_EXPR
12533 && ((code0 == MINUS_EXPR && is_positive >= 0)
12534 || (code0 == PLUS_EXPR && is_positive <= 0)))
12536 if (TREE_CODE (arg01) == INTEGER_CST
12537 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12538 fold_overflow_warning (("assuming signed overflow does not "
12539 "occur when assuming that (X - c) > X "
12540 "is always false"),
12541 WARN_STRICT_OVERFLOW_ALL);
12542 return constant_boolean_node (0, type);
12545 /* Likewise (X + c) < X becomes false. */
12546 if (code == LT_EXPR
12547 && ((code0 == PLUS_EXPR && is_positive >= 0)
12548 || (code0 == MINUS_EXPR && is_positive <= 0)))
12550 if (TREE_CODE (arg01) == INTEGER_CST
12551 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12552 fold_overflow_warning (("assuming signed overflow does not "
12553 "occur when assuming that "
12554 "(X + c) < X is always false"),
12555 WARN_STRICT_OVERFLOW_ALL);
12556 return constant_boolean_node (0, type);
12559 /* Convert (X - c) <= X to true. */
12560 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12561 && code == LE_EXPR
12562 && ((code0 == MINUS_EXPR && is_positive >= 0)
12563 || (code0 == PLUS_EXPR && is_positive <= 0)))
12565 if (TREE_CODE (arg01) == INTEGER_CST
12566 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12567 fold_overflow_warning (("assuming signed overflow does not "
12568 "occur when assuming that "
12569 "(X - c) <= X is always true"),
12570 WARN_STRICT_OVERFLOW_ALL);
12571 return constant_boolean_node (1, type);
12574 /* Convert (X + c) >= X to true. */
12575 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12576 && code == GE_EXPR
12577 && ((code0 == PLUS_EXPR && is_positive >= 0)
12578 || (code0 == MINUS_EXPR && is_positive <= 0)))
12580 if (TREE_CODE (arg01) == INTEGER_CST
12581 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12582 fold_overflow_warning (("assuming signed overflow does not "
12583 "occur when assuming that "
12584 "(X + c) >= X is always true"),
12585 WARN_STRICT_OVERFLOW_ALL);
12586 return constant_boolean_node (1, type);
12589 if (TREE_CODE (arg01) == INTEGER_CST)
12591 /* Convert X + c > X and X - c < X to true for integers. */
12592 if (code == GT_EXPR
12593 && ((code0 == PLUS_EXPR && is_positive > 0)
12594 || (code0 == MINUS_EXPR && is_positive < 0)))
12596 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12597 fold_overflow_warning (("assuming signed overflow does "
12598 "not occur when assuming that "
12599 "(X + c) > X is always true"),
12600 WARN_STRICT_OVERFLOW_ALL);
12601 return constant_boolean_node (1, type);
12604 if (code == LT_EXPR
12605 && ((code0 == MINUS_EXPR && is_positive > 0)
12606 || (code0 == PLUS_EXPR && is_positive < 0)))
12608 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12609 fold_overflow_warning (("assuming signed overflow does "
12610 "not occur when assuming that "
12611 "(X - c) < X is always true"),
12612 WARN_STRICT_OVERFLOW_ALL);
12613 return constant_boolean_node (1, type);
12616 /* Convert X + c <= X and X - c >= X to false for integers. */
12617 if (code == LE_EXPR
12618 && ((code0 == PLUS_EXPR && is_positive > 0)
12619 || (code0 == MINUS_EXPR && is_positive < 0)))
12621 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12622 fold_overflow_warning (("assuming signed overflow does "
12623 "not occur when assuming that "
12624 "(X + c) <= X is always false"),
12625 WARN_STRICT_OVERFLOW_ALL);
12626 return constant_boolean_node (0, type);
12629 if (code == GE_EXPR
12630 && ((code0 == MINUS_EXPR && is_positive > 0)
12631 || (code0 == PLUS_EXPR && is_positive < 0)))
12633 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12634 fold_overflow_warning (("assuming signed overflow does "
12635 "not occur when assuming that "
12636 "(X - c) >= X is always false"),
12637 WARN_STRICT_OVERFLOW_ALL);
12638 return constant_boolean_node (0, type);
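   /* For example, when signed overflow is undefined (the default for
      signed types without -fwrapv), x + 1 > x folds to true and
      x + 1 <= x folds to false for signed int x.  */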
12643 /* Comparisons with the highest or lowest possible integer of
12644 the specified precision will have known values. */
12646 tree arg1_type = TREE_TYPE (arg1);
12647 unsigned int width = TYPE_PRECISION (arg1_type);
12649 if (TREE_CODE (arg1) == INTEGER_CST
12650 && width <= 2 * HOST_BITS_PER_WIDE_INT
12651 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12653 HOST_WIDE_INT signed_max_hi;
12654 unsigned HOST_WIDE_INT signed_max_lo;
12655 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12657 if (width <= HOST_BITS_PER_WIDE_INT)
12659 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12660 - 1;
12661 signed_max_hi = 0;
12662 max_hi = 0;
12664 if (TYPE_UNSIGNED (arg1_type))
12666 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12667 min_lo = 0;
12668 min_hi = 0;
12670 else
12672 max_lo = signed_max_lo;
12673 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12674 min_hi = -1;
12677 else
12679 width -= HOST_BITS_PER_WIDE_INT;
12680 signed_max_lo = -1;
12681 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12682 - 1;
12683 max_lo = -1;
12684 min_lo = 0;
12686 if (TYPE_UNSIGNED (arg1_type))
12688 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12689 min_hi = 0;
12691 else
12693 max_hi = signed_max_hi;
12694 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12698 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12699 && TREE_INT_CST_LOW (arg1) == max_lo)
12700 switch (code)
12702 case GT_EXPR:
12703 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12705 case GE_EXPR:
12706 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12708 case LE_EXPR:
12709 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12711 case LT_EXPR:
12712 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12714 /* The GE_EXPR and LT_EXPR cases above are not normally
12715 reached because of previous transformations. */
12717 default:
12718 break;
12720 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12721 == max_hi
12722 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12723 switch (code)
12725 case GT_EXPR:
12726 arg1 = const_binop (PLUS_EXPR, arg1,
12727 build_int_cst (TREE_TYPE (arg1), 1), 0);
12728 return fold_build2_loc (loc, EQ_EXPR, type,
12729 fold_convert_loc (loc,
12730 TREE_TYPE (arg1), arg0),
12731 arg1);
12732 case LE_EXPR:
12733 arg1 = const_binop (PLUS_EXPR, arg1,
12734 build_int_cst (TREE_TYPE (arg1), 1), 0);
12735 return fold_build2_loc (loc, NE_EXPR, type,
12736 fold_convert_loc (loc, TREE_TYPE (arg1),
12737 arg0),
12738 arg1);
12739 default:
12740 break;
12742 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12743 == min_hi
12744 && TREE_INT_CST_LOW (arg1) == min_lo)
12745 switch (code)
12747 case LT_EXPR:
12748 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12750 case LE_EXPR:
12751 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12753 case GE_EXPR:
12754 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12756 case GT_EXPR:
12757 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12759 default:
12760 break;
12762 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12763 == min_hi
12764 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12765 switch (code)
12767 case GE_EXPR:
12768 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12769 return fold_build2_loc (loc, NE_EXPR, type,
12770 fold_convert_loc (loc,
12771 TREE_TYPE (arg1), arg0),
12772 arg1);
12773 case LT_EXPR:
12774 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12775 return fold_build2_loc (loc, EQ_EXPR, type,
12776 fold_convert_loc (loc, TREE_TYPE (arg1),
12777 arg0),
12778 arg1);
12779 default:
12780 break;
12783 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12784 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12785 && TYPE_UNSIGNED (arg1_type)
12786 /* We will flip the signedness of the comparison operator
12787 associated with the mode of arg1, so the sign bit is
12788 specified by this mode. Check that arg1 is the signed
12789 max associated with this sign bit. */
12790 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12791 /* signed_type does not work on pointer types. */
12792 && INTEGRAL_TYPE_P (arg1_type))
12794 /* The following case also applies to X < signed_max+1
12795 and X >= signed_max+1 because of previous transformations. */
12796 if (code == LE_EXPR || code == GT_EXPR)
12798 tree st;
12799 st = signed_type_for (TREE_TYPE (arg1));
12800 return fold_build2_loc (loc,
12801 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12802 type, fold_convert_loc (loc, st, arg0),
12803 build_int_cst (st, 0));
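      /* For example, assuming 32-bit unsigned int x:
           x <= 2147483647   becomes   (int) x >= 0
         since comparing against the signed maximum only tests the
         sign bit.  */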
12809 /* If we are comparing an ABS_EXPR with a constant, we can
12810 convert all the cases into explicit comparisons, but they may
12811 well not be faster than doing the ABS and one comparison.
12812 But ABS (X) <= C is a range comparison, which becomes a subtraction
12813 and a comparison, and is probably faster. */
12814 if (code == LE_EXPR
12815 && TREE_CODE (arg1) == INTEGER_CST
12816 && TREE_CODE (arg0) == ABS_EXPR
12817 && ! TREE_SIDE_EFFECTS (arg0)
12818 && (0 != (tem = negate_expr (arg1)))
12819 && TREE_CODE (tem) == INTEGER_CST
12820 && !TREE_OVERFLOW (tem))
12821 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12822 build2 (GE_EXPR, type,
12823 TREE_OPERAND (arg0, 0), tem),
12824 build2 (LE_EXPR, type,
12825 TREE_OPERAND (arg0, 0), arg1));
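   /* For example, ABS (x) <= 5 becomes x >= -5 && x <= 5.  */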
12827 /* Convert ABS_EXPR<x> >= 0 to true. */
12828 strict_overflow_p = false;
12829 if (code == GE_EXPR
12830 && (integer_zerop (arg1)
12831 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12832 && real_zerop (arg1)))
12833 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12835 if (strict_overflow_p)
12836 fold_overflow_warning (("assuming signed overflow does not occur "
12837 "when simplifying comparison of "
12838 "absolute value and zero"),
12839 WARN_STRICT_OVERFLOW_CONDITIONAL);
12840 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12843 /* Convert ABS_EXPR<x> < 0 to false. */
12844 strict_overflow_p = false;
12845 if (code == LT_EXPR
12846 && (integer_zerop (arg1) || real_zerop (arg1))
12847 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12849 if (strict_overflow_p)
12850 fold_overflow_warning (("assuming signed overflow does not occur "
12851 "when simplifying comparison of "
12852 "absolute value and zero"),
12853 WARN_STRICT_OVERFLOW_CONDITIONAL);
12854 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12857 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12858 and similarly for >= into !=. */
12859 if ((code == LT_EXPR || code == GE_EXPR)
12860 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12861 && TREE_CODE (arg1) == LSHIFT_EXPR
12862 && integer_onep (TREE_OPERAND (arg1, 0)))
12864 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12865 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12866 TREE_OPERAND (arg1, 1)),
12867 build_int_cst (TREE_TYPE (arg0), 0));
12868 goto fold_binary_exit;
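   /* For example, assuming unsigned int x and 0 <= y < 32:
        x < (1U << y)    becomes   (x >> y) == 0
        x >= (1U << y)   becomes   (x >> y) != 0.  */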
12871 if ((code == LT_EXPR || code == GE_EXPR)
12872 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12873 && CONVERT_EXPR_P (arg1)
12874 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12875 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12877 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12878 fold_convert_loc (loc, TREE_TYPE (arg0),
12879 build2 (RSHIFT_EXPR,
12880 TREE_TYPE (arg0), arg0,
12881 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12882 1))),
12883 build_int_cst (TREE_TYPE (arg0), 0));
12884 goto fold_binary_exit;
12887 return NULL_TREE;
12889 case UNORDERED_EXPR:
12890 case ORDERED_EXPR:
12891 case UNLT_EXPR:
12892 case UNLE_EXPR:
12893 case UNGT_EXPR:
12894 case UNGE_EXPR:
12895 case UNEQ_EXPR:
12896 case LTGT_EXPR:
12897 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12899 t1 = fold_relational_const (code, type, arg0, arg1);
12900 if (t1 != NULL_TREE)
12901 return t1;
12904 /* If the first operand is NaN, the result is constant. */
12905 if (TREE_CODE (arg0) == REAL_CST
12906 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12907 && (code != LTGT_EXPR || ! flag_trapping_math))
12909 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12910 ? integer_zero_node
12911 : integer_one_node;
12912 return omit_one_operand_loc (loc, type, t1, arg1);
12915 /* If the second operand is NaN, the result is constant. */
12916 if (TREE_CODE (arg1) == REAL_CST
12917 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12918 && (code != LTGT_EXPR || ! flag_trapping_math))
12920 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12921 ? integer_zero_node
12922 : integer_one_node;
12923 return omit_one_operand_loc (loc, type, t1, arg0);
12926 /* Simplify unordered comparison of something with itself. */
12927 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12928 && operand_equal_p (arg0, arg1, 0))
12929 return constant_boolean_node (1, type);
12931 if (code == LTGT_EXPR
12932 && !flag_trapping_math
12933 && operand_equal_p (arg0, arg1, 0))
12934 return constant_boolean_node (0, type);
12936 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12938 tree targ0 = strip_float_extensions (arg0);
12939 tree targ1 = strip_float_extensions (arg1);
12940 tree newtype = TREE_TYPE (targ0);
12942 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12943 newtype = TREE_TYPE (targ1);
12945 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12946 return fold_build2_loc (loc, code, type,
12947 fold_convert_loc (loc, newtype, targ0),
12948 fold_convert_loc (loc, newtype, targ1));
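   /* For example, for float f1 and f2,
        (double) f1 < (double) f2   becomes   f1 < f2
      since widening a float to double is exact and
      order-preserving.  */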
12951 return NULL_TREE;
12953 case COMPOUND_EXPR:
12954 /* When pedantic, a compound expression can be neither an lvalue
12955 nor an integer constant expression. */
12956 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12957 return NULL_TREE;
12958 /* Don't let (0, 0) be a null pointer constant. */
12959 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12960 : fold_convert_loc (loc, type, arg1);
12961 return pedantic_non_lvalue_loc (loc, tem);
12963 case COMPLEX_EXPR:
12964 if ((TREE_CODE (arg0) == REAL_CST
12965 && TREE_CODE (arg1) == REAL_CST)
12966 || (TREE_CODE (arg0) == INTEGER_CST
12967 && TREE_CODE (arg1) == INTEGER_CST))
12968 return build_complex (type, arg0, arg1);
12969 return NULL_TREE;
12971 case ASSERT_EXPR:
12972 /* An ASSERT_EXPR should never be passed to fold_binary. */
12973 gcc_unreachable ();
12975 default:
12976 return NULL_TREE;
12977 } /* switch (code) */
12978 fold_binary_exit:
12979 protected_set_expr_location (tem, loc);
12980 return tem;
12983 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12984 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
12985 of GOTO_EXPR. */
12987 static tree
12988 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
12990 switch (TREE_CODE (*tp))
12992 case LABEL_EXPR:
12993 return *tp;
12995 case GOTO_EXPR:
12996 *walk_subtrees = 0;
12998 /* ... fall through ... */
13000 default:
13001 return NULL_TREE;
13005 /* Return whether the sub-tree ST contains a label which is accessible from
13006 outside the sub-tree. */
13008 static bool
13009 contains_label_p (tree st)
13011 return
13012 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13015 /* Fold a ternary expression of code CODE and type TYPE with operands
13016 OP0, OP1, and OP2. Return the folded expression if folding is
13017 successful. Otherwise, return NULL_TREE. */
13019 tree
13020 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13021 tree op0, tree op1, tree op2)
13023 tree tem;
13024 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13025 enum tree_code_class kind = TREE_CODE_CLASS (code);
13027 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13028 && TREE_CODE_LENGTH (code) == 3);
13030 /* Strip any conversions that don't change the mode. This is safe
13031 for every expression, except for a comparison expression because
13032 its signedness is derived from its operands. So, in the latter
13033 case, only strip conversions that don't change the signedness.
13035 Note that this is done as an internal manipulation within the
13036 constant folder, in order to find the simplest representation of
13037 the arguments so that their form can be studied. In any case,
13038 the appropriate type conversions should be put back in the tree
13039 that will get out of the constant folder. */
13040 if (op0)
13042 arg0 = op0;
13043 STRIP_NOPS (arg0);
13046 if (op1)
13048 arg1 = op1;
13049 STRIP_NOPS (arg1);
13052 switch (code)
13054 case COMPONENT_REF:
13055 if (TREE_CODE (arg0) == CONSTRUCTOR
13056 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13058 unsigned HOST_WIDE_INT idx;
13059 tree field, value;
13060 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13061 if (field == arg1)
13062 return value;
13064 return NULL_TREE;
13066 case COND_EXPR:
13067 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13068 so all simple results must be passed through pedantic_non_lvalue. */
13069 if (TREE_CODE (arg0) == INTEGER_CST)
13071 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13072 tem = integer_zerop (arg0) ? op2 : op1;
13073 /* Only optimize constant conditions when the selected branch
13074 has the same type as the COND_EXPR. This avoids optimizing
13075 away "c ? x : throw", where the throw has a void type.
13076 Also avoid discarding an unused operand that contains a label. */
13077 if ((!TREE_SIDE_EFFECTS (unused_op)
13078 || !contains_label_p (unused_op))
13079 && (! VOID_TYPE_P (TREE_TYPE (tem))
13080 || VOID_TYPE_P (type)))
13081 return pedantic_non_lvalue_loc (loc, tem);
13082 return NULL_TREE;
13084 if (operand_equal_p (arg1, op2, 0))
13085 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13087 /* If we have A op B ? A : C, we may be able to convert this to a
13088 simpler expression, depending on the operation and the values
13089 of B and C. Signed zeros prevent all of these transformations,
13090 for reasons given above each one.
13092 Also try swapping the arguments and inverting the conditional. */
13093 if (COMPARISON_CLASS_P (arg0)
13094 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13095 arg1, TREE_OPERAND (arg0, 1))
13096 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13098 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13099 if (tem)
13100 return tem;
13103 if (COMPARISON_CLASS_P (arg0)
13104 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13105 op2,
13106 TREE_OPERAND (arg0, 1))
13107 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13109 tem = fold_truth_not_expr (loc, arg0);
13110 if (tem && COMPARISON_CLASS_P (tem))
13112 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13113 if (tem)
13114 return tem;
13118 /* If the second operand is simpler than the third, swap them
13119 since that produces better jump optimization results. */
13120 if (truth_value_p (TREE_CODE (arg0))
13121 && tree_swap_operands_p (op1, op2, false))
13123 /* See if this can be inverted. If it can't, possibly because
13124 it was a floating-point inequality comparison, don't do
13125 anything. */
13126 tem = fold_truth_not_expr (loc, arg0);
13127 if (tem)
13128 return fold_build3_loc (loc, code, type, tem, op2, op1);
13131 /* Convert A ? 1 : 0 to simply A. */
13132 if (integer_onep (op1)
13133 && integer_zerop (op2)
13134 /* If we try to convert OP0 to our type, the
13135 call to fold will try to move the conversion inside
13136 a COND, which will recurse. In that case, the COND_EXPR
13137 is probably the best choice, so leave it alone. */
13138 && type == TREE_TYPE (arg0))
13139 return pedantic_non_lvalue_loc (loc, arg0);
13141 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13142 over COND_EXPR in cases such as floating point comparisons. */
13143 if (integer_zerop (op1)
13144 && integer_onep (op2)
13145 && truth_value_p (TREE_CODE (arg0)))
13146 return pedantic_non_lvalue_loc (loc,
13147 fold_convert_loc (loc, type,
13148 invert_truthvalue_loc (loc,
13149 arg0)));
13151 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13152 if (TREE_CODE (arg0) == LT_EXPR
13153 && integer_zerop (TREE_OPERAND (arg0, 1))
13154 && integer_zerop (op2)
13155 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13157 /* sign_bit_p only checks ARG1 bits within A's precision.
13158 If <sign bit of A> has wider type than A, bits outside
13159 of A's precision in <sign bit of A> need to be checked.
13160 If they are all 0, this optimization needs to be done
13161 in unsigned A's type; if they are all 1, in signed A's type;
13162 otherwise this can't be done. */
13163 if (TYPE_PRECISION (TREE_TYPE (tem))
13164 < TYPE_PRECISION (TREE_TYPE (arg1))
13165 && TYPE_PRECISION (TREE_TYPE (tem))
13166 < TYPE_PRECISION (type))
13168 unsigned HOST_WIDE_INT mask_lo;
13169 HOST_WIDE_INT mask_hi;
13170 int inner_width, outer_width;
13171 tree tem_type;
13173 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13174 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13175 if (outer_width > TYPE_PRECISION (type))
13176 outer_width = TYPE_PRECISION (type);
13178 if (outer_width > HOST_BITS_PER_WIDE_INT)
13180 mask_hi = ((unsigned HOST_WIDE_INT) -1
13181 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13182 mask_lo = -1;
13184 else
13186 mask_hi = 0;
13187 mask_lo = ((unsigned HOST_WIDE_INT) -1
13188 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13190 if (inner_width > HOST_BITS_PER_WIDE_INT)
13192 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13193 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13194 mask_lo = 0;
13196 else
13197 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13198 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13200 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13201 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13203 tem_type = signed_type_for (TREE_TYPE (tem));
13204 tem = fold_convert_loc (loc, tem_type, tem);
13206 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13207 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13209 tem_type = unsigned_type_for (TREE_TYPE (tem));
13210 tem = fold_convert_loc (loc, tem_type, tem);
13212 else
13213 tem = NULL;
13216 if (tem)
13217 return
13218 fold_convert_loc (loc, type,
13219 fold_build2_loc (loc, BIT_AND_EXPR,
13220 TREE_TYPE (tem), tem,
13221 fold_convert_loc (loc,
13222 TREE_TYPE (tem),
13223 arg1)));
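      /* For example, assuming 32-bit int x:
           x < 0 ? INT_MIN : 0   becomes   x & INT_MIN
         selecting just the sign bit of x.  */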
13226 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13227 already handled above. */
13228 if (TREE_CODE (arg0) == BIT_AND_EXPR
13229 && integer_onep (TREE_OPERAND (arg0, 1))
13230 && integer_zerop (op2)
13231 && integer_pow2p (arg1))
13233 tree tem = TREE_OPERAND (arg0, 0);
13234 STRIP_NOPS (tem);
13235 if (TREE_CODE (tem) == RSHIFT_EXPR
13236 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13237 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13238 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13239 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13240 TREE_OPERAND (tem, 0), arg1);
13243 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13244 is probably obsolete because the first operand should be a
13245 truth value (that's why we have the two cases above), but let's
13246 leave it in until we can confirm this for all front-ends. */
13247 if (integer_zerop (op2)
13248 && TREE_CODE (arg0) == NE_EXPR
13249 && integer_zerop (TREE_OPERAND (arg0, 1))
13250 && integer_pow2p (arg1)
13251 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13252 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13253 arg1, OEP_ONLY_CONST))
13254 return pedantic_non_lvalue_loc (loc,
13255 fold_convert_loc (loc, type,
13256 TREE_OPERAND (arg0, 0)));
13258 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13259 if (integer_zerop (op2)
13260 && truth_value_p (TREE_CODE (arg0))
13261 && truth_value_p (TREE_CODE (arg1)))
13262 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13263 fold_convert_loc (loc, type, arg0),
13264 arg1);
13266 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13267 if (integer_onep (op2)
13268 && truth_value_p (TREE_CODE (arg0))
13269 && truth_value_p (TREE_CODE (arg1)))
13271 /* Only perform transformation if ARG0 is easily inverted. */
13272 tem = fold_truth_not_expr (loc, arg0);
13273 if (tem)
13274 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13275 fold_convert_loc (loc, type, tem),
13276 arg1);
13279 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13280 if (integer_zerop (arg1)
13281 && truth_value_p (TREE_CODE (arg0))
13282 && truth_value_p (TREE_CODE (op2)))
13284 /* Only perform transformation if ARG0 is easily inverted. */
13285 tem = fold_truth_not_expr (loc, arg0);
13286 if (tem)
13287 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13288 fold_convert_loc (loc, type, tem),
13289 op2);
13292 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13293 if (integer_onep (arg1)
13294 && truth_value_p (TREE_CODE (arg0))
13295 && truth_value_p (TREE_CODE (op2)))
13296 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13297 fold_convert_loc (loc, type, arg0),
13298 op2);
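   /* Summarizing the four truth-value cases above, for truth values
      a and b:
        a ? b : 0   becomes   a && b
        a ? b : 1   becomes   !a || b
        a ? 0 : b   becomes   !a && b
        a ? 1 : b   becomes   a || b.  */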
13300 return NULL_TREE;
13302 case CALL_EXPR:
13303 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13304 of fold_ternary on them. */
13305 gcc_unreachable ();
13307 case BIT_FIELD_REF:
13308 if ((TREE_CODE (arg0) == VECTOR_CST
13309 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13310 && type == TREE_TYPE (TREE_TYPE (arg0)))
13312 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13313 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13315 if (width != 0
13316 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13317 && (idx % width) == 0
13318 && (idx = idx / width)
13319 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13321 tree elements = NULL_TREE;
13323 if (TREE_CODE (arg0) == VECTOR_CST)
13324 elements = TREE_VECTOR_CST_ELTS (arg0);
13325 else
13327 unsigned HOST_WIDE_INT idx;
13328 tree value;
13330 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13331 elements = tree_cons (NULL_TREE, value, elements);
13333 while (idx-- > 0 && elements)
13334 elements = TREE_CHAIN (elements);
13335 if (elements)
13336 return TREE_VALUE (elements);
13337 else
13338 return fold_convert_loc (loc, type, integer_zero_node);
13342 /* A bit-field-ref that references the full argument can be stripped. */
13343 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13344 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13345 && integer_zerop (op2))
13346 return fold_convert_loc (loc, type, arg0);
13348 return NULL_TREE;
13350 default:
13351 return NULL_TREE;
13352 } /* switch (code) */
13355 /* Perform constant folding and related simplification of EXPR.
13356 The related simplifications include x*1 => x, x*0 => 0, etc.,
13357 and application of the associative law.
13358 NOP_EXPR conversions may be removed freely (as long as we
13359 are careful not to change the type of the overall expression).
13360 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13361 but we can constant-fold them if they have constant operands. */
13363 #ifdef ENABLE_FOLD_CHECKING
13364 # define fold(x) fold_1 (x)
13365 static tree fold_1 (tree);
13366 static
13367 #endif
13368 tree
13369 fold (tree expr)
13371 const tree t = expr;
13372 enum tree_code code = TREE_CODE (t);
13373 enum tree_code_class kind = TREE_CODE_CLASS (code);
13374 tree tem;
13375 location_t loc = EXPR_LOCATION (expr);
13377 /* Return right away if a constant. */
13378 if (kind == tcc_constant)
13379 return t;
13381 /* CALL_EXPR-like objects with variable numbers of operands are
13382 treated specially. */
13383 if (kind == tcc_vl_exp)
13385 if (code == CALL_EXPR)
13387 tem = fold_call_expr (loc, expr, false);
13388 return tem ? tem : expr;
13390 return expr;
13393 if (IS_EXPR_CODE_CLASS (kind))
13395 tree type = TREE_TYPE (t);
13396 tree op0, op1, op2;
13398 switch (TREE_CODE_LENGTH (code))
13400 case 1:
13401 op0 = TREE_OPERAND (t, 0);
13402 tem = fold_unary_loc (loc, code, type, op0);
13403 return tem ? tem : expr;
13404 case 2:
13405 op0 = TREE_OPERAND (t, 0);
13406 op1 = TREE_OPERAND (t, 1);
13407 tem = fold_binary_loc (loc, code, type, op0, op1);
13408 return tem ? tem : expr;
13409 case 3:
13410 op0 = TREE_OPERAND (t, 0);
13411 op1 = TREE_OPERAND (t, 1);
13412 op2 = TREE_OPERAND (t, 2);
13413 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13414 return tem ? tem : expr;
13415 default:
13416 break;
13420 switch (code)
13422 case ARRAY_REF:
13424 tree op0 = TREE_OPERAND (t, 0);
13425 tree op1 = TREE_OPERAND (t, 1);
13427 if (TREE_CODE (op1) == INTEGER_CST
13428 && TREE_CODE (op0) == CONSTRUCTOR
13429 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13431 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13432 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13433 unsigned HOST_WIDE_INT begin = 0;
13435 /* Find a matching index by means of a binary search. */
13436 while (begin != end)
13438 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13439 tree index = VEC_index (constructor_elt, elts, middle)->index;
13441 if (TREE_CODE (index) == INTEGER_CST
13442 && tree_int_cst_lt (index, op1))
13443 begin = middle + 1;
13444 else if (TREE_CODE (index) == INTEGER_CST
13445 && tree_int_cst_lt (op1, index))
13446 end = middle;
13447 else if (TREE_CODE (index) == RANGE_EXPR
13448 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13449 begin = middle + 1;
13450 else if (TREE_CODE (index) == RANGE_EXPR
13451 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13452 end = middle;
13453 else
13454 return VEC_index (constructor_elt, elts, middle)->value;
13458 return t;
13461 case CONST_DECL:
13462 return fold (DECL_INITIAL (t));
13464 default:
13465 return t;
13466 } /* switch (code) */
13469 #ifdef ENABLE_FOLD_CHECKING
13470 #undef fold
13472 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13473 static void fold_check_failed (const_tree, const_tree);
13474 void print_fold_checksum (const_tree);
13476 /* When --enable-checking=fold, compute a digest of expr before
13477 and after the actual fold call, to verify that fold did not
13478 accidentally change the original expr. */
13480 tree
13481 fold (tree expr)
13483 tree ret;
13484 struct md5_ctx ctx;
13485 unsigned char checksum_before[16], checksum_after[16];
13486 htab_t ht;
13488 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13489 md5_init_ctx (&ctx);
13490 fold_checksum_tree (expr, &ctx, ht);
13491 md5_finish_ctx (&ctx, checksum_before);
13492 htab_empty (ht);
13494 ret = fold_1 (expr);
13496 md5_init_ctx (&ctx);
13497 fold_checksum_tree (expr, &ctx, ht);
13498 md5_finish_ctx (&ctx, checksum_after);
13499 htab_delete (ht);
13501 if (memcmp (checksum_before, checksum_after, 16))
13502 fold_check_failed (expr, ret);
13504 return ret;
13507 void
13508 print_fold_checksum (const_tree expr)
13510 struct md5_ctx ctx;
13511 unsigned char checksum[16], cnt;
13512 htab_t ht;
13514 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13515 md5_init_ctx (&ctx);
13516 fold_checksum_tree (expr, &ctx, ht);
13517 md5_finish_ctx (&ctx, checksum);
13518 htab_delete (ht);
13519 for (cnt = 0; cnt < 16; ++cnt)
13520 fprintf (stderr, "%02x", checksum[cnt]);
13521 putc ('\n', stderr);
13524 static void
13525 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13527 internal_error ("fold check: original tree changed by fold");
13530 static void
13531 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13533 const void **slot;
13534 enum tree_code code;
13535 union tree_node buf;
13536 int i, len;
13538 recursive_label:
13540 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13541 <= sizeof (struct tree_function_decl))
13542 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13543 if (expr == NULL)
13544 return;
13545 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13546 if (*slot != NULL)
13547 return;
13548 *slot = expr;
13549 code = TREE_CODE (expr);
13550 if (TREE_CODE_CLASS (code) == tcc_declaration
13551 && DECL_ASSEMBLER_NAME_SET_P (expr))
13553 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13554 memcpy ((char *) &buf, expr, tree_size (expr));
13555 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13556 expr = (tree) &buf;
13558 else if (TREE_CODE_CLASS (code) == tcc_type
13559 && (TYPE_POINTER_TO (expr)
13560 || TYPE_REFERENCE_TO (expr)
13561 || TYPE_CACHED_VALUES_P (expr)
13562 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13563 || TYPE_NEXT_VARIANT (expr)))
13565 /* Allow these fields to be modified. */
13566 tree tmp;
13567 memcpy ((char *) &buf, expr, tree_size (expr));
13568 expr = tmp = (tree) &buf;
13569 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13570 TYPE_POINTER_TO (tmp) = NULL;
13571 TYPE_REFERENCE_TO (tmp) = NULL;
13572 TYPE_NEXT_VARIANT (tmp) = NULL;
13573 if (TYPE_CACHED_VALUES_P (tmp))
13575 TYPE_CACHED_VALUES_P (tmp) = 0;
13576 TYPE_CACHED_VALUES (tmp) = NULL;
13579 md5_process_bytes (expr, tree_size (expr), ctx);
13580 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13581 if (TREE_CODE_CLASS (code) != tcc_type
13582 && TREE_CODE_CLASS (code) != tcc_declaration
13583 && code != TREE_LIST
13584 && code != SSA_NAME)
13585 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13586 switch (TREE_CODE_CLASS (code))
13588 case tcc_constant:
13589 switch (code)
13591 case STRING_CST:
13592 md5_process_bytes (TREE_STRING_POINTER (expr),
13593 TREE_STRING_LENGTH (expr), ctx);
13594 break;
13595 case COMPLEX_CST:
13596 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13597 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13598 break;
13599 case VECTOR_CST:
13600 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13601 break;
13602 default:
13603 break;
13605 break;
13606 case tcc_exceptional:
13607 switch (code)
13609 case TREE_LIST:
13610 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13611 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13612 expr = TREE_CHAIN (expr);
13613 goto recursive_label;
13614 break;
13615 case TREE_VEC:
13616 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13617 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13618 break;
13619 default:
13620 break;
13622 break;
13623 case tcc_expression:
13624 case tcc_reference:
13625 case tcc_comparison:
13626 case tcc_unary:
13627 case tcc_binary:
13628 case tcc_statement:
13629 case tcc_vl_exp:
13630 len = TREE_OPERAND_LENGTH (expr);
13631 for (i = 0; i < len; ++i)
13632 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13633 break;
13634 case tcc_declaration:
13635 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13636 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13637 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13639 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13640 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13641 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13642 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13643 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13645 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13646 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13648 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13650 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13651 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13652 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13654 break;
13655 case tcc_type:
13656 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13657 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13658 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13659 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13660 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13661 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13662 if (INTEGRAL_TYPE_P (expr)
13663 || SCALAR_FLOAT_TYPE_P (expr))
13665 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13666 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13668 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13669 if (TREE_CODE (expr) == RECORD_TYPE
13670 || TREE_CODE (expr) == UNION_TYPE
13671 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13672 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13673 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13674 break;
13675 default:
13676 break;
13680 /* Helper function for outputting the checksum of a tree T. When
13681 debugging with gdb, you can "define mynext" to be "next" followed
13682 by "call debug_fold_checksum (op0)", then just trace down till the
13683 outputs differ. */
13685 void
13686 debug_fold_checksum (const_tree t)
13688 int i;
13689 unsigned char checksum[16];
13690 struct md5_ctx ctx;
13691 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13693 md5_init_ctx (&ctx);
13694 fold_checksum_tree (t, &ctx, ht);
13695 md5_finish_ctx (&ctx, checksum);
13696 htab_empty (ht);
13698 for (i = 0; i < 16; i++)
13699 fprintf (stderr, "%d ", checksum[i]);
13701 fprintf (stderr, "\n");
13704 #endif
13706 /* Fold a unary tree expression with code CODE of type TYPE with an
13707 operand OP0. LOC is the location of the resulting expression.
13708 Return a folded expression if successful. Otherwise, return a tree
13709 expression with code CODE of type TYPE with an operand OP0. */
13711 tree
13712 fold_build1_stat_loc (location_t loc,
13713 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13715 tree tem;
13716 #ifdef ENABLE_FOLD_CHECKING
13717 unsigned char checksum_before[16], checksum_after[16];
13718 struct md5_ctx ctx;
13719 htab_t ht;
13721 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13722 md5_init_ctx (&ctx);
13723 fold_checksum_tree (op0, &ctx, ht);
13724 md5_finish_ctx (&ctx, checksum_before);
13725 htab_empty (ht);
13726 #endif
13728 tem = fold_unary_loc (loc, code, type, op0);
13729 if (!tem)
13731 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13732 SET_EXPR_LOCATION (tem, loc);
13735 #ifdef ENABLE_FOLD_CHECKING
13736 md5_init_ctx (&ctx);
13737 fold_checksum_tree (op0, &ctx, ht);
13738 md5_finish_ctx (&ctx, checksum_after);
13739 htab_delete (ht);
13741 if (memcmp (checksum_before, checksum_after, 16))
13742 fold_check_failed (op0, tem);
13743 #endif
13744 return tem;
13747 /* Fold a binary tree expression with code CODE of type TYPE with
13748 operands OP0 and OP1. LOC is the location of the resulting
13749 expression. Return a folded expression if successful. Otherwise,
13750 return a tree expression with code CODE of type TYPE with operands
13751 OP0 and OP1. */
13753 tree
13754 fold_build2_stat_loc (location_t loc,
13755 enum tree_code code, tree type, tree op0, tree op1
13756 MEM_STAT_DECL)
13758 tree tem;
13759 #ifdef ENABLE_FOLD_CHECKING
13760 unsigned char checksum_before_op0[16],
13761 checksum_before_op1[16],
13762 checksum_after_op0[16],
13763 checksum_after_op1[16];
13764 struct md5_ctx ctx;
13765 htab_t ht;
13767 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13768 md5_init_ctx (&ctx);
13769 fold_checksum_tree (op0, &ctx, ht);
13770 md5_finish_ctx (&ctx, checksum_before_op0);
13771 htab_empty (ht);
13773 md5_init_ctx (&ctx);
13774 fold_checksum_tree (op1, &ctx, ht);
13775 md5_finish_ctx (&ctx, checksum_before_op1);
13776 htab_empty (ht);
13777 #endif
13779 tem = fold_binary_loc (loc, code, type, op0, op1);
13780 if (!tem)
13782 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13783 SET_EXPR_LOCATION (tem, loc);
13786 #ifdef ENABLE_FOLD_CHECKING
13787 md5_init_ctx (&ctx);
13788 fold_checksum_tree (op0, &ctx, ht);
13789 md5_finish_ctx (&ctx, checksum_after_op0);
13790 htab_empty (ht);
13792 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13793 fold_check_failed (op0, tem);
13795 md5_init_ctx (&ctx);
13796 fold_checksum_tree (op1, &ctx, ht);
13797 md5_finish_ctx (&ctx, checksum_after_op1);
13798 htab_delete (ht);
13800 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13801 fold_check_failed (op1, tem);
13802 #endif
13803 return tem;
13806 /* Fold a ternary tree expression with code CODE of type TYPE with
13807 operands OP0, OP1, and OP2. Return a folded expression if
13808 successful. Otherwise, return a tree expression with code CODE of
13809 type TYPE with operands OP0, OP1, and OP2. */
13811 tree
13812 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13813 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13815 tree tem;
13816 #ifdef ENABLE_FOLD_CHECKING
13817 unsigned char checksum_before_op0[16],
13818 checksum_before_op1[16],
13819 checksum_before_op2[16],
13820 checksum_after_op0[16],
13821 checksum_after_op1[16],
13822 checksum_after_op2[16];
13823 struct md5_ctx ctx;
13824 htab_t ht;
13826 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13827 md5_init_ctx (&ctx);
13828 fold_checksum_tree (op0, &ctx, ht);
13829 md5_finish_ctx (&ctx, checksum_before_op0);
13830 htab_empty (ht);
13832 md5_init_ctx (&ctx);
13833 fold_checksum_tree (op1, &ctx, ht);
13834 md5_finish_ctx (&ctx, checksum_before_op1);
13835 htab_empty (ht);
13837 md5_init_ctx (&ctx);
13838 fold_checksum_tree (op2, &ctx, ht);
13839 md5_finish_ctx (&ctx, checksum_before_op2);
13840 htab_empty (ht);
13841 #endif
13843 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13844 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13845 if (!tem)
13847 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13848 SET_EXPR_LOCATION (tem, loc);
13851 #ifdef ENABLE_FOLD_CHECKING
13852 md5_init_ctx (&ctx);
13853 fold_checksum_tree (op0, &ctx, ht);
13854 md5_finish_ctx (&ctx, checksum_after_op0);
13855 htab_empty (ht);
13857 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13858 fold_check_failed (op0, tem);
13860 md5_init_ctx (&ctx);
13861 fold_checksum_tree (op1, &ctx, ht);
13862 md5_finish_ctx (&ctx, checksum_after_op1);
13863 htab_empty (ht);
13865 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13866 fold_check_failed (op1, tem);
13868 md5_init_ctx (&ctx);
13869 fold_checksum_tree (op2, &ctx, ht);
13870 md5_finish_ctx (&ctx, checksum_after_op2);
13871 htab_delete (ht);
13873 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13874 fold_check_failed (op2, tem);
13875 #endif
13876 return tem;
13879 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13880 arguments in ARGARRAY, and a null static chain.
13881 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13882 of type TYPE from the given operands as constructed by build_call_array. */
13884 tree
13885 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13886 int nargs, tree *argarray)
13888 tree tem;
13889 #ifdef ENABLE_FOLD_CHECKING
13890 unsigned char checksum_before_fn[16],
13891 checksum_before_arglist[16],
13892 checksum_after_fn[16],
13893 checksum_after_arglist[16];
13894 struct md5_ctx ctx;
13895 htab_t ht;
13896 int i;
13898 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13899 md5_init_ctx (&ctx);
13900 fold_checksum_tree (fn, &ctx, ht);
13901 md5_finish_ctx (&ctx, checksum_before_fn);
13902 htab_empty (ht);
13904 md5_init_ctx (&ctx);
13905 for (i = 0; i < nargs; i++)
13906 fold_checksum_tree (argarray[i], &ctx, ht);
13907 md5_finish_ctx (&ctx, checksum_before_arglist);
13908 htab_empty (ht);
13909 #endif
13911 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13913 #ifdef ENABLE_FOLD_CHECKING
13914 md5_init_ctx (&ctx);
13915 fold_checksum_tree (fn, &ctx, ht);
13916 md5_finish_ctx (&ctx, checksum_after_fn);
13917 htab_empty (ht);
13919 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13920 fold_check_failed (fn, tem);
13922 md5_init_ctx (&ctx);
13923 for (i = 0; i < nargs; i++)
13924 fold_checksum_tree (argarray[i], &ctx, ht);
13925 md5_finish_ctx (&ctx, checksum_after_arglist);
13926 htab_delete (ht);
13928 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13929 fold_check_failed (NULL_TREE, tem);
13930 #endif
13931 return tem;
13934 /* Perform constant folding and related simplification of initializer
13935 expression EXPR. These behave identically to "fold_buildN" but ignore
13936 potential run-time traps and exceptions that fold must preserve. */
13938 #define START_FOLD_INIT \
13939 int saved_signaling_nans = flag_signaling_nans;\
13940 int saved_trapping_math = flag_trapping_math;\
13941 int saved_rounding_math = flag_rounding_math;\
13942 int saved_trapv = flag_trapv;\
13943 int saved_folding_initializer = folding_initializer;\
13944 flag_signaling_nans = 0;\
13945 flag_trapping_math = 0;\
13946 flag_rounding_math = 0;\
13947 flag_trapv = 0;\
13948 folding_initializer = 1;
13950 #define END_FOLD_INIT \
13951 flag_signaling_nans = saved_signaling_nans;\
13952 flag_trapping_math = saved_trapping_math;\
13953 flag_rounding_math = saved_rounding_math;\
13954 flag_trapv = saved_trapv;\
13955 folding_initializer = saved_folding_initializer;
13957 tree
13958 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13959 tree type, tree op)
13961 tree result;
13962 START_FOLD_INIT;
13964 result = fold_build1_loc (loc, code, type, op);
13966 END_FOLD_INIT;
13967 return result;
13970 tree
13971 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13972 tree type, tree op0, tree op1)
13974 tree result;
13975 START_FOLD_INIT;
13977 result = fold_build2_loc (loc, code, type, op0, op1);
13979 END_FOLD_INIT;
13980 return result;
13983 tree
13984 fold_build3_initializer_loc (location_t loc, enum tree_code code,
13985 tree type, tree op0, tree op1, tree op2)
13987 tree result;
13988 START_FOLD_INIT;
13990 result = fold_build3_loc (loc, code, type, op0, op1, op2);
13992 END_FOLD_INIT;
13993 return result;
13996 tree
13997 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13998 int nargs, tree *argarray)
14000 tree result;
14001 START_FOLD_INIT;
14003 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14005 END_FOLD_INIT;
14006 return result;
14009 #undef START_FOLD_INIT
14010 #undef END_FOLD_INIT
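/* Editorial example (not part of the original source): a front end folding
   a static initializer such as "static double d = x / y;" with constant
   operands can use the initializer variants so that -ftrapping-math or
   -frounding-math do not block compile-time evaluation.  A hedged sketch,
   assuming OP0 and OP1 are REAL_CST trees already built by the caller:

     tree val = fold_build2_initializer_loc (input_location, RDIV_EXPR,
                                             double_type_node, op0, op1);

   The same request through fold_build2_loc could legitimately refuse to
   fold when flag_trapping_math or flag_rounding_math is set, since the
   run-time evaluation would then be observable.  */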
14012 /* Determine if first argument is a multiple of second argument. Return 0 if
14013 it is not, or we cannot easily determine it to be.
14015 An example of the sort of thing we care about (at this point; this routine
14016 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14017 fold cases do now) is discovering that
14019 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14021 is a multiple of
14023 SAVE_EXPR (J * 8)
14025 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14027 This code also handles discovering that
14029 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14031 is a multiple of 8 so we don't have to worry about dealing with a
14032 possible remainder.
14034 Note that we *look* inside a SAVE_EXPR only to determine how it was
14035 calculated; it is not safe for fold to do much of anything else with the
14036 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14037 at run time. For example, the second example above *cannot* be implemented
14038 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14039 evaluation time of the original SAVE_EXPR is not necessarily the same at
14040 the time the new expression is evaluated. The only optimization of this
14041 sort that would be valid is changing
14043 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14045 divided by 8 to
14047 SAVE_EXPR (I) * SAVE_EXPR (J)
14049 (where the same SAVE_EXPR (J) is used in the original and the
14050 transformed version). */
14052 int
14053 multiple_of_p (tree type, const_tree top, const_tree bottom)
14055 if (operand_equal_p (top, bottom, 0))
14056 return 1;
14058 if (TREE_CODE (type) != INTEGER_TYPE)
14059 return 0;
14061 switch (TREE_CODE (top))
14063 case BIT_AND_EXPR:
14064 /* Bitwise AND can only clear bits. If BOTTOM is a power of two and
14065 either operand of the AND is a multiple of BOTTOM, then so is TOP. */
14066 if (!integer_pow2p (bottom))
14067 return 0;
14068 /* FALLTHRU */
14070 case MULT_EXPR:
14071 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14072 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14074 case PLUS_EXPR:
14075 case MINUS_EXPR:
14076 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14077 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14079 case LSHIFT_EXPR:
14080 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14082 tree op1, t1;
14084 op1 = TREE_OPERAND (top, 1);
14085 /* const_binop may not detect overflow correctly,
14086 so check for it explicitly here. */
14087 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14088 > TREE_INT_CST_LOW (op1)
14089 && TREE_INT_CST_HIGH (op1) == 0
14090 && 0 != (t1 = fold_convert (type,
14091 const_binop (LSHIFT_EXPR,
14092 size_one_node,
14093 op1, 0)))
14094 && !TREE_OVERFLOW (t1))
14095 return multiple_of_p (type, t1, bottom);
14097 return 0;
14099 case NOP_EXPR:
14100 /* Can't handle conversions from non-integral or wider integral type. */
14101 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14102 || (TYPE_PRECISION (type)
14103 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14104 return 0;
14106 /* ... fall through ... */
14108 case SAVE_EXPR:
14109 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14111 case COND_EXPR:
14112 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14113 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14115 case INTEGER_CST:
14116 if (TREE_CODE (bottom) != INTEGER_CST
14117 || integer_zerop (bottom)
14118 || (TYPE_UNSIGNED (type)
14119 && (tree_int_cst_sgn (top) < 0
14120 || tree_int_cst_sgn (bottom) < 0)))
14121 return 0;
14122 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14123 top, bottom, 0));
14125 default:
14126 return 0;
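/* Editorial example (not part of the original source): the recursion above
   is deliberately conservative.  For TOP = I * (J * 8) and BOTTOM = 8 the
   MULT_EXPR case succeeds because the second factor is itself a multiple of
   8; for PLUS_EXPR both addends must qualify, so TOP = 6 + 2 is rejected
   even though the sum happens to be 8 -- the routine may only answer 1 when
   TOP is provably a multiple of BOTTOM for every value of its variables,
   and 0 is always a safe answer.  */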
14130 /* Return true if CODE or TYPE is known to be non-negative. */
14132 static bool
14133 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14135 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14136 && truth_value_p (code))
14137 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14138 have a signed:1 type (where the values are -1 and 0). */
14139 return true;
14140 return false;
14143 /* Return true if (CODE OP0) is known to be non-negative. If the return
14144 value is based on the assumption that signed overflow is undefined,
14145 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14146 *STRICT_OVERFLOW_P. */
14148 bool
14149 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14150 bool *strict_overflow_p)
14152 if (TYPE_UNSIGNED (type))
14153 return true;
14155 switch (code)
14157 case ABS_EXPR:
14158 /* We can't return 1 if flag_wrapv is set because
14159 ABS_EXPR<INT_MIN> = INT_MIN. */
14160 if (!INTEGRAL_TYPE_P (type))
14161 return true;
14162 if (TYPE_OVERFLOW_UNDEFINED (type))
14164 *strict_overflow_p = true;
14165 return true;
14167 break;
14169 case NON_LVALUE_EXPR:
14170 case FLOAT_EXPR:
14171 case FIX_TRUNC_EXPR:
14172 return tree_expr_nonnegative_warnv_p (op0,
14173 strict_overflow_p);
14175 case NOP_EXPR:
14177 tree inner_type = TREE_TYPE (op0);
14178 tree outer_type = type;
14180 if (TREE_CODE (outer_type) == REAL_TYPE)
14182 if (TREE_CODE (inner_type) == REAL_TYPE)
14183 return tree_expr_nonnegative_warnv_p (op0,
14184 strict_overflow_p);
14185 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14187 if (TYPE_UNSIGNED (inner_type))
14188 return true;
14189 return tree_expr_nonnegative_warnv_p (op0,
14190 strict_overflow_p);
14193 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14195 if (TREE_CODE (inner_type) == REAL_TYPE)
14196 return tree_expr_nonnegative_warnv_p (op0,
14197 strict_overflow_p);
14198 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14199 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14200 && TYPE_UNSIGNED (inner_type);
14203 break;
14205 default:
14206 return tree_simple_nonnegative_warnv_p (code, type);
14209 /* We don't know the sign of `t', so be conservative and return false. */
14210 return false;
14213 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14214 value is based on the assumption that signed overflow is undefined,
14215 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14216 *STRICT_OVERFLOW_P. */
14218 bool
14219 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14220 tree op1, bool *strict_overflow_p)
14222 if (TYPE_UNSIGNED (type))
14223 return true;
14225 switch (code)
14227 case POINTER_PLUS_EXPR:
14228 case PLUS_EXPR:
14229 if (FLOAT_TYPE_P (type))
14230 return (tree_expr_nonnegative_warnv_p (op0,
14231 strict_overflow_p)
14232 && tree_expr_nonnegative_warnv_p (op1,
14233 strict_overflow_p));
14235 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14236 both unsigned and at least 2 bits shorter than the result. */
14237 if (TREE_CODE (type) == INTEGER_TYPE
14238 && TREE_CODE (op0) == NOP_EXPR
14239 && TREE_CODE (op1) == NOP_EXPR)
14241 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14242 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14243 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14244 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14246 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14247 TYPE_PRECISION (inner2)) + 1;
14248 return prec < TYPE_PRECISION (type);
14251 break;
14253 case MULT_EXPR:
14254 if (FLOAT_TYPE_P (type))
14256 /* x * x for floating point x is always non-negative. */
14257 if (operand_equal_p (op0, op1, 0))
14258 return true;
14259 return (tree_expr_nonnegative_warnv_p (op0,
14260 strict_overflow_p)
14261 && tree_expr_nonnegative_warnv_p (op1,
14262 strict_overflow_p));
14265 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14266 both unsigned and their combined precision is less than that of the result. */
14267 if (TREE_CODE (type) == INTEGER_TYPE
14268 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14269 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14271 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14272 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14273 : TREE_TYPE (op0);
14274 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14275 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14276 : TREE_TYPE (op1);
14278 bool unsigned0 = TYPE_UNSIGNED (inner0);
14279 bool unsigned1 = TYPE_UNSIGNED (inner1);
14281 if (TREE_CODE (op0) == INTEGER_CST)
14282 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14284 if (TREE_CODE (op1) == INTEGER_CST)
14285 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14287 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14288 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14290 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14291 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14292 : TYPE_PRECISION (inner0);
14294 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14295 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14296 : TYPE_PRECISION (inner1);
14298 return precision0 + precision1 < TYPE_PRECISION (type);
14301 return false;
14303 case BIT_AND_EXPR:
14304 case MAX_EXPR:
14305 return (tree_expr_nonnegative_warnv_p (op0,
14306 strict_overflow_p)
14307 || tree_expr_nonnegative_warnv_p (op1,
14308 strict_overflow_p));
14310 case BIT_IOR_EXPR:
14311 case BIT_XOR_EXPR:
14312 case MIN_EXPR:
14313 case RDIV_EXPR:
14314 case TRUNC_DIV_EXPR:
14315 case CEIL_DIV_EXPR:
14316 case FLOOR_DIV_EXPR:
14317 case ROUND_DIV_EXPR:
14318 return (tree_expr_nonnegative_warnv_p (op0,
14319 strict_overflow_p)
14320 && tree_expr_nonnegative_warnv_p (op1,
14321 strict_overflow_p));
14323 case TRUNC_MOD_EXPR:
14324 case CEIL_MOD_EXPR:
14325 case FLOOR_MOD_EXPR:
14326 case ROUND_MOD_EXPR:
14327 return tree_expr_nonnegative_warnv_p (op0,
14328 strict_overflow_p);
14329 default:
14330 return tree_simple_nonnegative_warnv_p (code, type);
14333 /* We don't know the sign of `t', so be conservative and return false. */
14334 return false;
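/* Editorial example (not part of the original source, standalone C): the
   precision arithmetic above, made concrete.  Two zero-extended 8-bit
   values need at most max(8,8) + 1 = 9 bits when added and 8 + 8 = 16 bits
   when multiplied, so in a 32-bit signed int the sign bit provably stays
   clear:

     unsigned char a = 255, b = 255;
     int sum  = (int) a + (int) b;   // at most 510   < 2^9  <= 2^31
     int prod = (int) a * (int) b;   // at most 65025 < 2^16 <= 2^31
*/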
14337 /* Return true if T is known to be non-negative. If the return
14338 value is based on the assumption that signed overflow is undefined,
14339 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14340 *STRICT_OVERFLOW_P. */
14342 bool
14343 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14345 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14346 return true;
14348 switch (TREE_CODE (t))
14350 case INTEGER_CST:
14351 return tree_int_cst_sgn (t) >= 0;
14353 case REAL_CST:
14354 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14356 case FIXED_CST:
14357 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14359 case COND_EXPR:
14360 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14361 strict_overflow_p)
14362 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14363 strict_overflow_p));
14364 default:
14365 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14366 TREE_TYPE (t));
14368 /* We don't know the sign of `t', so be conservative and return false. */
14369 return false;
14372 /* Return true if T is known to be non-negative. If the return
14373 value is based on the assumption that signed overflow is undefined,
14374 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14375 *STRICT_OVERFLOW_P. */
14377 bool
14378 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14379 tree arg0, tree arg1, bool *strict_overflow_p)
14381 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14382 switch (DECL_FUNCTION_CODE (fndecl))
14384 CASE_FLT_FN (BUILT_IN_ACOS):
14385 CASE_FLT_FN (BUILT_IN_ACOSH):
14386 CASE_FLT_FN (BUILT_IN_CABS):
14387 CASE_FLT_FN (BUILT_IN_COSH):
14388 CASE_FLT_FN (BUILT_IN_ERFC):
14389 CASE_FLT_FN (BUILT_IN_EXP):
14390 CASE_FLT_FN (BUILT_IN_EXP10):
14391 CASE_FLT_FN (BUILT_IN_EXP2):
14392 CASE_FLT_FN (BUILT_IN_FABS):
14393 CASE_FLT_FN (BUILT_IN_FDIM):
14394 CASE_FLT_FN (BUILT_IN_HYPOT):
14395 CASE_FLT_FN (BUILT_IN_POW10):
14396 CASE_INT_FN (BUILT_IN_FFS):
14397 CASE_INT_FN (BUILT_IN_PARITY):
14398 CASE_INT_FN (BUILT_IN_POPCOUNT):
14399 case BUILT_IN_BSWAP32:
14400 case BUILT_IN_BSWAP64:
14401 /* Always true. */
14402 return true;
14404 CASE_FLT_FN (BUILT_IN_SQRT):
14405 /* sqrt(-0.0) is -0.0. */
14406 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14407 return true;
14408 return tree_expr_nonnegative_warnv_p (arg0,
14409 strict_overflow_p);
14411 CASE_FLT_FN (BUILT_IN_ASINH):
14412 CASE_FLT_FN (BUILT_IN_ATAN):
14413 CASE_FLT_FN (BUILT_IN_ATANH):
14414 CASE_FLT_FN (BUILT_IN_CBRT):
14415 CASE_FLT_FN (BUILT_IN_CEIL):
14416 CASE_FLT_FN (BUILT_IN_ERF):
14417 CASE_FLT_FN (BUILT_IN_EXPM1):
14418 CASE_FLT_FN (BUILT_IN_FLOOR):
14419 CASE_FLT_FN (BUILT_IN_FMOD):
14420 CASE_FLT_FN (BUILT_IN_FREXP):
14421 CASE_FLT_FN (BUILT_IN_LCEIL):
14422 CASE_FLT_FN (BUILT_IN_LDEXP):
14423 CASE_FLT_FN (BUILT_IN_LFLOOR):
14424 CASE_FLT_FN (BUILT_IN_LLCEIL):
14425 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14426 CASE_FLT_FN (BUILT_IN_LLRINT):
14427 CASE_FLT_FN (BUILT_IN_LLROUND):
14428 CASE_FLT_FN (BUILT_IN_LRINT):
14429 CASE_FLT_FN (BUILT_IN_LROUND):
14430 CASE_FLT_FN (BUILT_IN_MODF):
14431 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14432 CASE_FLT_FN (BUILT_IN_RINT):
14433 CASE_FLT_FN (BUILT_IN_ROUND):
14434 CASE_FLT_FN (BUILT_IN_SCALB):
14435 CASE_FLT_FN (BUILT_IN_SCALBLN):
14436 CASE_FLT_FN (BUILT_IN_SCALBN):
14437 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14438 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14439 CASE_FLT_FN (BUILT_IN_SINH):
14440 CASE_FLT_FN (BUILT_IN_TANH):
14441 CASE_FLT_FN (BUILT_IN_TRUNC):
14442 /* True if the 1st argument is nonnegative. */
14443 return tree_expr_nonnegative_warnv_p (arg0,
14444 strict_overflow_p);
14446 CASE_FLT_FN (BUILT_IN_FMAX):
14447 /* True if the 1st OR 2nd arguments are nonnegative. */
14448 return (tree_expr_nonnegative_warnv_p (arg0,
14449 strict_overflow_p)
14450 || (tree_expr_nonnegative_warnv_p (arg1,
14451 strict_overflow_p)));
14453 CASE_FLT_FN (BUILT_IN_FMIN):
14454 /* True if the 1st AND 2nd arguments are nonnegative. */
14455 return (tree_expr_nonnegative_warnv_p (arg0,
14456 strict_overflow_p)
14457 && (tree_expr_nonnegative_warnv_p (arg1,
14458 strict_overflow_p)));
14460 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14461 /* True if the 2nd argument is nonnegative. */
14462 return tree_expr_nonnegative_warnv_p (arg1,
14463 strict_overflow_p);
14465 CASE_FLT_FN (BUILT_IN_POWI):
14466 /* True if the 1st argument is nonnegative or the second
14467 argument is an even integer. */
14468 if (TREE_CODE (arg1) == INTEGER_CST
14469 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14470 return true;
14471 return tree_expr_nonnegative_warnv_p (arg0,
14472 strict_overflow_p);
14474 CASE_FLT_FN (BUILT_IN_POW):
14475 /* True if the 1st argument is nonnegative or the second
14476 argument is an even integer valued real. */
14477 if (TREE_CODE (arg1) == REAL_CST)
14479 REAL_VALUE_TYPE c;
14480 HOST_WIDE_INT n;
14482 c = TREE_REAL_CST (arg1);
14483 n = real_to_integer (&c);
14484 if ((n & 1) == 0)
14486 REAL_VALUE_TYPE cint;
14487 real_from_integer (&cint, VOIDmode, n,
14488 n < 0 ? -1 : 0, 0);
14489 if (real_identical (&c, &cint))
14490 return true;
14493 return tree_expr_nonnegative_warnv_p (arg0,
14494 strict_overflow_p);
14496 default:
14497 break;
14499 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14500 type);
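/* Editorial example (not part of the original source): in the POW case
   above, pow (x, 2.0) is recognized as non-negative for any x because 2.0
   survives the real_to_integer / real_from_integer round trip unchanged and
   is even, whereas pow (x, 2.5) truncates to 2 and fails real_identical, so
   it is non-negative only when x itself is.  */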
14503 /* Return true if T is known to be non-negative. If the return
14504 value is based on the assumption that signed overflow is undefined,
14505 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14506 *STRICT_OVERFLOW_P. */
14508 bool
14509 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14511 enum tree_code code = TREE_CODE (t);
14512 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14513 return true;
14515 switch (code)
14517 case TARGET_EXPR:
14519 tree temp = TARGET_EXPR_SLOT (t);
14520 t = TARGET_EXPR_INITIAL (t);
14522 /* If the initializer is non-void, then it's a normal expression
14523 that will be assigned to the slot. */
14524 if (!VOID_TYPE_P (t))
14525 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14527 /* Otherwise, the initializer sets the slot in some way. One common
14528 way is an assignment statement at the end of the initializer. */
14529 while (1)
14531 if (TREE_CODE (t) == BIND_EXPR)
14532 t = expr_last (BIND_EXPR_BODY (t));
14533 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14534 || TREE_CODE (t) == TRY_CATCH_EXPR)
14535 t = expr_last (TREE_OPERAND (t, 0));
14536 else if (TREE_CODE (t) == STATEMENT_LIST)
14537 t = expr_last (t);
14538 else
14539 break;
14541 if (TREE_CODE (t) == MODIFY_EXPR
14542 && TREE_OPERAND (t, 0) == temp)
14543 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14544 strict_overflow_p);
14546 return false;
14549 case CALL_EXPR:
14551 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14552 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14554 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14555 get_callee_fndecl (t),
14556 arg0,
14557 arg1,
14558 strict_overflow_p);
14560 case COMPOUND_EXPR:
14561 case MODIFY_EXPR:
14562 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14563 strict_overflow_p);
14564 case BIND_EXPR:
14565 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14566 strict_overflow_p);
14567 case SAVE_EXPR:
14568 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14569 strict_overflow_p);
14571 default:
14572 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14573 TREE_TYPE (t));
14576 /* We don't know the sign of `t', so be conservative and return false. */
14577 return false;
14580 /* Return true if T is known to be non-negative. If the return
14581 value is based on the assumption that signed overflow is undefined,
14582 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14583 *STRICT_OVERFLOW_P. */
14585 bool
14586 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14588 enum tree_code code;
14589 if (t == error_mark_node)
14590 return false;
14592 code = TREE_CODE (t);
14593 switch (TREE_CODE_CLASS (code))
14595 case tcc_binary:
14596 case tcc_comparison:
14597 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14598 TREE_TYPE (t),
14599 TREE_OPERAND (t, 0),
14600 TREE_OPERAND (t, 1),
14601 strict_overflow_p);
14603 case tcc_unary:
14604 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14605 TREE_TYPE (t),
14606 TREE_OPERAND (t, 0),
14607 strict_overflow_p);
14609 case tcc_constant:
14610 case tcc_declaration:
14611 case tcc_reference:
14612 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14614 default:
14615 break;
14618 switch (code)
14620 case TRUTH_AND_EXPR:
14621 case TRUTH_OR_EXPR:
14622 case TRUTH_XOR_EXPR:
14623 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14624 TREE_TYPE (t),
14625 TREE_OPERAND (t, 0),
14626 TREE_OPERAND (t, 1),
14627 strict_overflow_p);
14628 case TRUTH_NOT_EXPR:
14629 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14630 TREE_TYPE (t),
14631 TREE_OPERAND (t, 0),
14632 strict_overflow_p);
14634 case COND_EXPR:
14635 case CONSTRUCTOR:
14636 case OBJ_TYPE_REF:
14637 case ASSERT_EXPR:
14638 case ADDR_EXPR:
14639 case WITH_SIZE_EXPR:
14640 case SSA_NAME:
14641 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14643 default:
14644 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14648 /* Return true if `t' is known to be non-negative. Handle warnings
14649 about undefined signed overflow. */
14651 bool
14652 tree_expr_nonnegative_p (tree t)
14654 bool ret, strict_overflow_p;
14656 strict_overflow_p = false;
14657 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14658 if (strict_overflow_p)
14659 fold_overflow_warning (("assuming signed overflow does not occur when "
14660 "determining that expression is always "
14661 "non-negative"),
14662 WARN_STRICT_OVERFLOW_MISC);
14663 return ret;
14667 /* Return true when (CODE OP0) is known to be nonzero. For an address
14668 this means non-null; for floating point we further ensure the value
14669 is not a denormal. Similar logic is present in nonzero_address in rtlanal.c.
14671 If the return value is based on the assumption that signed overflow
14672 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14673 change *STRICT_OVERFLOW_P. */
14675 bool
14676 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14677 bool *strict_overflow_p)
14679 switch (code)
14681 case ABS_EXPR:
14682 return tree_expr_nonzero_warnv_p (op0,
14683 strict_overflow_p);
14685 case NOP_EXPR:
14687 tree inner_type = TREE_TYPE (op0);
14688 tree outer_type = type;
14690 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14691 && tree_expr_nonzero_warnv_p (op0,
14692 strict_overflow_p));
14694 break;
14696 case NON_LVALUE_EXPR:
14697 return tree_expr_nonzero_warnv_p (op0,
14698 strict_overflow_p);
14700 default:
14701 break;
14704 return false;
14707 /* Return true when (CODE OP0 OP1) is known to be nonzero. For an address
14708 this means non-null; for floating point we further ensure the value
14709 is not a denormal. Similar logic is present in nonzero_address in rtlanal.c.
14711 If the return value is based on the assumption that signed overflow
14712 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14713 change *STRICT_OVERFLOW_P. */
14715 bool
14716 tree_binary_nonzero_warnv_p (enum tree_code code,
14717 tree type,
14718 tree op0,
14719 tree op1, bool *strict_overflow_p)
14721 bool sub_strict_overflow_p;
14722 switch (code)
14724 case POINTER_PLUS_EXPR:
14725 case PLUS_EXPR:
14726 if (TYPE_OVERFLOW_UNDEFINED (type))
14728 /* In the presence of negative values it is hard
14729 to say anything. */
14730 sub_strict_overflow_p = false;
14731 if (!tree_expr_nonnegative_warnv_p (op0,
14732 &sub_strict_overflow_p)
14733 || !tree_expr_nonnegative_warnv_p (op1,
14734 &sub_strict_overflow_p))
14735 return false;
14736 /* One of the operands must be positive and the other non-negative. */
14737 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14738 overflows, on a twos-complement machine the sum of two
14739 nonnegative numbers can never be zero. */
14740 return (tree_expr_nonzero_warnv_p (op0,
14741 strict_overflow_p)
14742 || tree_expr_nonzero_warnv_p (op1,
14743 strict_overflow_p));
14745 break;
14747 case MULT_EXPR:
14748 if (TYPE_OVERFLOW_UNDEFINED (type))
14750 if (tree_expr_nonzero_warnv_p (op0,
14751 strict_overflow_p)
14752 && tree_expr_nonzero_warnv_p (op1,
14753 strict_overflow_p))
14755 *strict_overflow_p = true;
14756 return true;
14759 break;
14761 case MIN_EXPR:
14762 sub_strict_overflow_p = false;
14763 if (tree_expr_nonzero_warnv_p (op0,
14764 &sub_strict_overflow_p)
14765 && tree_expr_nonzero_warnv_p (op1,
14766 &sub_strict_overflow_p))
14768 if (sub_strict_overflow_p)
14769 *strict_overflow_p = true;
14771 break;
14773 case MAX_EXPR:
14774 sub_strict_overflow_p = false;
14775 if (tree_expr_nonzero_warnv_p (op0,
14776 &sub_strict_overflow_p))
14778 if (sub_strict_overflow_p)
14779 *strict_overflow_p = true;
14781 /* If both operands are nonzero, then MAX is too. */
14782 if (tree_expr_nonzero_warnv_p (op1,
14783 strict_overflow_p))
14784 return true;
14786 /* MAX where operand 0 is positive is positive. */
14787 return tree_expr_nonnegative_warnv_p (op0,
14788 strict_overflow_p);
14790 /* MAX where operand 1 is positive is positive. */
14791 else if (tree_expr_nonzero_warnv_p (op1,
14792 &sub_strict_overflow_p)
14793 && tree_expr_nonnegative_warnv_p (op1,
14794 &sub_strict_overflow_p))
14796 if (sub_strict_overflow_p)
14797 *strict_overflow_p = true;
14798 return true;
14800 break;
14802 case BIT_IOR_EXPR:
14803 return (tree_expr_nonzero_warnv_p (op1,
14804 strict_overflow_p)
14805 || tree_expr_nonzero_warnv_p (op0,
14806 strict_overflow_p));
14808 default:
14809 break;
14812 return false;
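/* Editorial example (not part of the original source): in the PLUS_EXPR
   case above, once both operands are known non-negative, a 32-bit sum lies
   in [0, 2 * INT_MAX], and the only way it can equal zero modulo 2^32 is
   for both operands to be zero; a single nonzero operand therefore makes
   the sum nonzero even if the addition wraps, which is why
   *STRICT_OVERFLOW_P is left untouched there.  */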
14815 /* Return true when T is an address and is known to be nonzero.
14816 For floating point we further ensure that T is not denormal.
14817 Similar logic is present in nonzero_address in rtlanal.c.
14819 If the return value is based on the assumption that signed overflow
14820 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14821 change *STRICT_OVERFLOW_P. */
14823 bool
14824 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14826 bool sub_strict_overflow_p;
14827 switch (TREE_CODE (t))
14829 case INTEGER_CST:
14830 return !integer_zerop (t);
14832 case ADDR_EXPR:
14834 tree base = get_base_address (TREE_OPERAND (t, 0));
14836 if (!base)
14837 return false;
14839 /* Weak declarations may link to NULL. Other things may also be NULL,
14840 so protect with -fdelete-null-pointer-checks; variables allocated
14841 on the stack, however, can never be null. */
14842 if (DECL_P (base)
14843 && (flag_delete_null_pointer_checks
14844 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
14845 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
14847 /* Constants are never weak. */
14848 if (CONSTANT_CLASS_P (base))
14849 return true;
14851 return false;
14854 case COND_EXPR:
14855 sub_strict_overflow_p = false;
14856 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14857 &sub_strict_overflow_p)
14858 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14859 &sub_strict_overflow_p))
14861 if (sub_strict_overflow_p)
14862 *strict_overflow_p = true;
14863 return true;
14865 break;
14867 default:
14868 break;
14870 return false;
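/* Editorial example (not part of the original source): given
   "extern int w __attribute__ ((weak));", the ADDR_EXPR case above refuses
   to treat &w as nonzero, since an unresolved weak symbol really is a null
   address at run time; a stack-allocated local is accepted even without
   -fdelete-null-pointer-checks, because no automatic variable can live at
   address zero.  */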
14873 /* Return true when T is an address and is known to be nonzero.
14874 For floating point we further ensure that T is not denormal.
14875 Similar logic is present in nonzero_address in rtlanal.c.
14877 If the return value is based on the assumption that signed overflow
14878 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14879 change *STRICT_OVERFLOW_P. */
14881 bool
14882 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14884 tree type = TREE_TYPE (t);
14885 enum tree_code code;
14887 /* Doing something useful for floating point would need more work. */
14888 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14889 return false;
14891 code = TREE_CODE (t);
14892 switch (TREE_CODE_CLASS (code))
14894 case tcc_unary:
14895 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14896 strict_overflow_p);
14897 case tcc_binary:
14898 case tcc_comparison:
14899 return tree_binary_nonzero_warnv_p (code, type,
14900 TREE_OPERAND (t, 0),
14901 TREE_OPERAND (t, 1),
14902 strict_overflow_p);
14903 case tcc_constant:
14904 case tcc_declaration:
14905 case tcc_reference:
14906 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14908 default:
14909 break;
14912 switch (code)
14914 case TRUTH_NOT_EXPR:
14915 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14916 strict_overflow_p);
14918 case TRUTH_AND_EXPR:
14919 case TRUTH_OR_EXPR:
14920 case TRUTH_XOR_EXPR:
14921 return tree_binary_nonzero_warnv_p (code, type,
14922 TREE_OPERAND (t, 0),
14923 TREE_OPERAND (t, 1),
14924 strict_overflow_p);
14926 case COND_EXPR:
14927 case CONSTRUCTOR:
14928 case OBJ_TYPE_REF:
14929 case ASSERT_EXPR:
14930 case ADDR_EXPR:
14931 case WITH_SIZE_EXPR:
14932 case SSA_NAME:
14933 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14935 case COMPOUND_EXPR:
14936 case MODIFY_EXPR:
14937 case BIND_EXPR:
14938 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14939 strict_overflow_p);
14941 case SAVE_EXPR:
14942 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14943 strict_overflow_p);
14945 case CALL_EXPR:
14946 return alloca_call_p (t);
14948 default:
14949 break;
14951 return false;
14954 /* Return true when T is an address and is known to be nonzero.
14955 Handle warnings about undefined signed overflow. */
14957 bool
14958 tree_expr_nonzero_p (tree t)
14960 bool ret, strict_overflow_p;
14962 strict_overflow_p = false;
14963 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14964 if (strict_overflow_p)
14965 fold_overflow_warning (("assuming signed overflow does not occur when "
14966 "determining that expression is always "
14967 "non-zero"),
14968 WARN_STRICT_OVERFLOW_MISC);
14969 return ret;
14972 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14973 attempt to fold the expression to a constant without modifying TYPE,
14974 OP0 or OP1.
14976 If the expression could be simplified to a constant, then return
14977 the constant. If the expression would not be simplified to a
14978 constant, then return NULL_TREE. */
14980 tree
14981 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14983 tree tem = fold_binary (code, type, op0, op1);
14984 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14987 /* Given the components of a unary expression CODE, TYPE and OP0,
14988 attempt to fold the expression to a constant without modifying
14989 TYPE or OP0.
14991 If the expression could be simplified to a constant, then return
14992 the constant. If the expression would not be simplified to a
14993 constant, then return NULL_TREE. */
14995 tree
14996 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14998 tree tem = fold_unary (code, type, op0);
14999 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15002 /* If EXP represents referencing an element in a constant string
15003 (either via pointer arithmetic or array indexing), return the
15004 tree representing the value accessed, otherwise return NULL. */
15006 tree
15007 fold_read_from_constant_string (tree exp)
15009 if ((TREE_CODE (exp) == INDIRECT_REF
15010 || TREE_CODE (exp) == ARRAY_REF)
15011 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15013 tree exp1 = TREE_OPERAND (exp, 0);
15014 tree index;
15015 tree string;
15016 location_t loc = EXPR_LOCATION (exp);
15018 if (TREE_CODE (exp) == INDIRECT_REF)
15019 string = string_constant (exp1, &index);
15020 else
15022 tree low_bound = array_ref_low_bound (exp);
15023 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15025 /* Optimize the special-case of a zero lower bound.
15027 We convert the low_bound to sizetype to avoid some problems
15028 with constant folding. (E.g. suppose the lower bound is 1,
15029 and its mode is QI. Without the conversion, (ARRAY
15030 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15031 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15032 if (! integer_zerop (low_bound))
15033 index = size_diffop_loc (loc, index,
15034 fold_convert_loc (loc, sizetype, low_bound));
15036 string = exp1;
15039 if (string
15040 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15041 && TREE_CODE (string) == STRING_CST
15042 && TREE_CODE (index) == INTEGER_CST
15043 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15044 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15045 == MODE_INT)
15046 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15047 return build_int_cst_type (TREE_TYPE (exp),
15048 (TREE_STRING_POINTER (string)
15049 [TREE_INT_CST_LOW (index)]));
15051 return NULL;
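/* Editorial example (not part of the original source): for EXP = "abc"[1],
   STRING becomes the STRING_CST "abc" (whose TREE_STRING_LENGTH of 4 counts
   the trailing NUL), INDEX is 1, and the function returns the character
   constant 'b' in the type of EXP.  A non-constant index, or one at or
   beyond the string length, yields NULL and the reference is left for run
   time.  */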
15054 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15055 an integer, real, or fixed-point constant.
15057 TYPE is the type of the result. */
15059 static tree
15060 fold_negate_const (tree arg0, tree type)
15062 tree t = NULL_TREE;
15064 switch (TREE_CODE (arg0))
15066 case INTEGER_CST:
15068 unsigned HOST_WIDE_INT low;
15069 HOST_WIDE_INT high;
15070 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15071 TREE_INT_CST_HIGH (arg0),
15072 &low, &high);
15073 t = force_fit_type_double (type, low, high, 1,
15074 (overflow | TREE_OVERFLOW (arg0))
15075 && !TYPE_UNSIGNED (type));
15076 break;
15079 case REAL_CST:
15080 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15081 break;
15083 case FIXED_CST:
15085 FIXED_VALUE_TYPE f;
15086 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15087 &(TREE_FIXED_CST (arg0)), NULL,
15088 TYPE_SATURATING (type));
15089 t = build_fixed (type, f);
15090 /* Propagate overflow flags. */
15091 if (overflow_p | TREE_OVERFLOW (arg0))
15092 TREE_OVERFLOW (t) = 1;
15093 break;
15096 default:
15097 gcc_unreachable ();
15100 return t;
15103 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15104 an integer constant or real constant.
15106 TYPE is the type of the result. */
15108 tree
15109 fold_abs_const (tree arg0, tree type)
15111 tree t = NULL_TREE;
15113 switch (TREE_CODE (arg0))
15115 case INTEGER_CST:
15116 /* If the value is unsigned, then the absolute value is
15117 the same as the ordinary value. */
15118 if (TYPE_UNSIGNED (type))
15119 t = arg0;
15120 /* Similarly, if the value is non-negative. */
15121 else if (INT_CST_LT (integer_minus_one_node, arg0))
15122 t = arg0;
15123 /* If the value is negative, then the absolute value is
15124 its negation. */
15125 else
15127 unsigned HOST_WIDE_INT low;
15128 HOST_WIDE_INT high;
15129 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15130 TREE_INT_CST_HIGH (arg0),
15131 &low, &high);
15132 t = force_fit_type_double (type, low, high, -1,
15133 overflow | TREE_OVERFLOW (arg0));
15135 break;
15137 case REAL_CST:
15138 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15139 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15140 else
15141 t = arg0;
15142 break;
15144 default:
15145 gcc_unreachable ();
15148 return t;
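/* Editorial example (not part of the original source): for a 32-bit signed
   type, fold_abs_const on INT_MIN negates into a value the type cannot
   hold, so force_fit_type_double hands back INT_MIN again with
   TREE_OVERFLOW set -- the one case where the "absolute value" is still
   negative.  */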
15151 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15152 constant. TYPE is the type of the result. */
15154 static tree
15155 fold_not_const (tree arg0, tree type)
15157 tree t = NULL_TREE;
15159 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15161 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15162 ~TREE_INT_CST_HIGH (arg0), 0,
15163 TREE_OVERFLOW (arg0));
15165 return t;
15168 /* Given CODE, a relational operator, the target type TYPE, and two
15169 constant operands OP0 and OP1, return the result of the
15170 relational operation. If the result is not a compile time
15171 constant, then return NULL_TREE. */
15173 static tree
15174 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15176 int result, invert;
15178 /* From here on, the only cases we handle are when the result is
15179 known to be a constant. */
15181 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15183 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15184 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15186 /* Handle the cases where either operand is a NaN. */
15187 if (real_isnan (c0) || real_isnan (c1))
15189 switch (code)
15191 case EQ_EXPR:
15192 case ORDERED_EXPR:
15193 result = 0;
15194 break;
15196 case NE_EXPR:
15197 case UNORDERED_EXPR:
15198 case UNLT_EXPR:
15199 case UNLE_EXPR:
15200 case UNGT_EXPR:
15201 case UNGE_EXPR:
15202 case UNEQ_EXPR:
15203 result = 1;
15204 break;
15206 case LT_EXPR:
15207 case LE_EXPR:
15208 case GT_EXPR:
15209 case GE_EXPR:
15210 case LTGT_EXPR:
15211 if (flag_trapping_math)
15212 return NULL_TREE;
15213 result = 0;
15214 break;
15216 default:
15217 gcc_unreachable ();
15220 return constant_boolean_node (result, type);
15223 return constant_boolean_node (real_compare (code, c0, c1), type);
15226 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15228 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15229 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15230 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15233 /* Handle equality/inequality of complex constants. */
15234 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15236 tree rcond = fold_relational_const (code, type,
15237 TREE_REALPART (op0),
15238 TREE_REALPART (op1));
15239 tree icond = fold_relational_const (code, type,
15240 TREE_IMAGPART (op0),
15241 TREE_IMAGPART (op1));
15242 if (code == EQ_EXPR)
15243 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15244 else if (code == NE_EXPR)
15245 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15246 else
15247 return NULL_TREE;
15250 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15252 To compute GT, swap the arguments and do LT.
15253 To compute GE, do LT and invert the result.
15254 To compute LE, swap the arguments, do LT and invert the result.
15255 To compute NE, do EQ and invert the result.
15257 Therefore, the code below must handle only EQ and LT. */
15259 if (code == LE_EXPR || code == GT_EXPR)
15261 tree tem = op0;
15262 op0 = op1;
15263 op1 = tem;
15264 code = swap_tree_comparison (code);
15267 /* Note that it is safe to invert for real values here because we
15268 have already handled the one case that it matters. */
15270 invert = 0;
15271 if (code == NE_EXPR || code == GE_EXPR)
15273 invert = 1;
15274 code = invert_tree_comparison (code, false);
15277 /* Compute a result for LT or EQ if args permit;
15278 otherwise return NULL_TREE. */
15279 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15281 if (code == EQ_EXPR)
15282 result = tree_int_cst_equal (op0, op1);
15283 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15284 result = INT_CST_LT_UNSIGNED (op0, op1);
15285 else
15286 result = INT_CST_LT (op0, op1);
15288 else
15289 return NULL_TREE;
15291 if (invert)
15292 result ^= 1;
15293 return constant_boolean_node (result, type);
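/* Editorial example (not part of the original source): with OP0 a NaN
   REAL_CST, "OP0 == OP0" folds to 0 and "OP0 != OP0" folds to 1, because a
   NaN compares unordered even to itself; "OP0 < 1.0" folds to 0 only when
   flag_trapping_math is clear, since the ordered comparison would otherwise
   raise the invalid-operation exception at run time and must be
   preserved.  */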
15296 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15297 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15298 itself. */
15300 tree
15301 fold_build_cleanup_point_expr (tree type, tree expr)
15303 /* If the expression does not have side effects then we don't have to wrap
15304 it with a cleanup point expression. */
15305 if (!TREE_SIDE_EFFECTS (expr))
15306 return expr;
15308 /* If the expression is a return, check whether the expression inside the
15309 return, or the right-hand side of the modify expression inside the
15310 return, is free of side effects. If either has none, we don't need to
15311 wrap the expression in a cleanup point expression. Note we don't check the
15312 left-hand side of the modify because it should always be the return decl. */
15313 if (TREE_CODE (expr) == RETURN_EXPR)
15315 tree op = TREE_OPERAND (expr, 0);
15316 if (!op || !TREE_SIDE_EFFECTS (op))
15317 return expr;
15318 op = TREE_OPERAND (op, 1);
15319 if (!TREE_SIDE_EFFECTS (op))
15320 return expr;
15323 return build1 (CLEANUP_POINT_EXPR, type, expr);
15326 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15327 of an indirection through OP0, or NULL_TREE if no simplification is
15328 possible. */
15330 tree
15331 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15333 tree sub = op0;
15334 tree subtype;
15336 STRIP_NOPS (sub);
15337 subtype = TREE_TYPE (sub);
15338 if (!POINTER_TYPE_P (subtype))
15339 return NULL_TREE;
15341 if (TREE_CODE (sub) == ADDR_EXPR)
15343 tree op = TREE_OPERAND (sub, 0);
15344 tree optype = TREE_TYPE (op);
15345 /* *&CONST_DECL -> to the value of the const decl. */
15346 if (TREE_CODE (op) == CONST_DECL)
15347 return DECL_INITIAL (op);
15348 /* *&p => p; make sure to handle *&"str"[cst] here. */
15349 if (type == optype)
15351 tree fop = fold_read_from_constant_string (op);
15352 if (fop)
15353 return fop;
15354 else
15355 return op;
15357 /* *(foo *)&fooarray => fooarray[0] */
15358 else if (TREE_CODE (optype) == ARRAY_TYPE
15359 && type == TREE_TYPE (optype))
15361 tree type_domain = TYPE_DOMAIN (optype);
15362 tree min_val = size_zero_node;
15363 if (type_domain && TYPE_MIN_VALUE (type_domain))
15364 min_val = TYPE_MIN_VALUE (type_domain);
15365 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15366 SET_EXPR_LOCATION (op0, loc);
15367 return op0;
15369 /* *(foo *)&complexfoo => __real__ complexfoo */
15370 else if (TREE_CODE (optype) == COMPLEX_TYPE
15371 && type == TREE_TYPE (optype))
15372 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15373 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15374 else if (TREE_CODE (optype) == VECTOR_TYPE
15375 && type == TREE_TYPE (optype))
15377 tree part_width = TYPE_SIZE (type);
15378 tree index = bitsize_int (0);
15379 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15383 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15384 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15385 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15387 tree op00 = TREE_OPERAND (sub, 0);
15388 tree op01 = TREE_OPERAND (sub, 1);
15389 tree op00type;
15391 STRIP_NOPS (op00);
15392 op00type = TREE_TYPE (op00);
15393 if (TREE_CODE (op00) == ADDR_EXPR
15394 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15395 && type == TREE_TYPE (TREE_TYPE (op00type)))
15397 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15398 tree part_width = TYPE_SIZE (type);
15399 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15400 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15401 tree index = bitsize_int (indexi);
15403 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15404 return fold_build3_loc (loc,
15405 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15406 part_width, index);
15412 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15413 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15414 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15416 tree op00 = TREE_OPERAND (sub, 0);
15417 tree op01 = TREE_OPERAND (sub, 1);
15418 tree op00type;
15420 STRIP_NOPS (op00);
15421 op00type = TREE_TYPE (op00);
15422 if (TREE_CODE (op00) == ADDR_EXPR
15423 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15424 && type == TREE_TYPE (TREE_TYPE (op00type)))
15426 tree size = TYPE_SIZE_UNIT (type);
15427 if (tree_int_cst_equal (size, op01))
15428 return fold_build1_loc (loc, IMAGPART_EXPR, type,
15429 TREE_OPERAND (op00, 0));
15433 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15434 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15435 && type == TREE_TYPE (TREE_TYPE (subtype)))
15437 tree type_domain;
15438 tree min_val = size_zero_node;
15439 sub = build_fold_indirect_ref_loc (loc, sub);
15440 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15441 if (type_domain && TYPE_MIN_VALUE (type_domain))
15442 min_val = TYPE_MIN_VALUE (type_domain);
15443 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15444 SET_EXPR_LOCATION (op0, loc);
15445 return op0;
15448 return NULL_TREE;
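/* Editorial example (not part of the original source): given "int a[4];",
   the ADDR_EXPR case above rewrites *(int *)&a as a[0]; given
   "_Complex double c;", the POINTER_PLUS_EXPR case rewrites
   ((double *)&c)[1] as __imag__ c because the byte offset matches
   TYPE_SIZE_UNIT of the element type.  */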
15451 /* Builds an expression for an indirection through T, simplifying some
15452 cases. */
15454 tree
15455 build_fold_indirect_ref_loc (location_t loc, tree t)
15457 tree type = TREE_TYPE (TREE_TYPE (t));
15458 tree sub = fold_indirect_ref_1 (loc, type, t);
15460 if (sub)
15461 return sub;
15463 t = build1 (INDIRECT_REF, type, t);
15464 SET_EXPR_LOCATION (t, loc);
15465 return t;
15468 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15470 tree
15471 fold_indirect_ref_loc (location_t loc, tree t)
15473 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15475 if (sub)
15476 return sub;
15477 else
15478 return t;
15481 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15482 whose result is ignored. The type of the returned tree need not be
15483 the same as the original expression. */
15485 tree
15486 fold_ignored_result (tree t)
15488 if (!TREE_SIDE_EFFECTS (t))
15489 return integer_zero_node;
15491 for (;;)
15492 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15494 case tcc_unary:
15495 t = TREE_OPERAND (t, 0);
15496 break;
15498 case tcc_binary:
15499 case tcc_comparison:
15500 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15501 t = TREE_OPERAND (t, 0);
15502 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15503 t = TREE_OPERAND (t, 1);
15504 else
15505 return t;
15506 break;
15508 case tcc_expression:
15509 switch (TREE_CODE (t))
15511 case COMPOUND_EXPR:
15512 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15513 return t;
15514 t = TREE_OPERAND (t, 0);
15515 break;
15517 case COND_EXPR:
15518 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15519 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15520 return t;
15521 t = TREE_OPERAND (t, 0);
15522 break;
15524 default:
15525 return t;
15527 break;
15529 default:
15530 return t;
15534 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15535 This can only be applied to objects of a sizetype. */
15537 tree
15538 round_up_loc (location_t loc, tree value, int divisor)
15540 tree div = NULL_TREE;
15542 gcc_assert (divisor > 0);
15543 if (divisor == 1)
15544 return value;
15546 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15547 have to do anything. Only do this when we are not given a constant,
15548 because for a constant this check is more expensive than simply
15549 performing the rounding. */
15550 if (TREE_CODE (value) != INTEGER_CST)
15552 div = build_int_cst (TREE_TYPE (value), divisor);
15554 if (multiple_of_p (TREE_TYPE (value), value, div))
15555 return value;
15558 /* If divisor is a power of two, simplify this to bit manipulation. */
15559 if (divisor == (divisor & -divisor))
15561 if (TREE_CODE (value) == INTEGER_CST)
15563 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15564 unsigned HOST_WIDE_INT high;
15565 bool overflow_p;
15567 if ((low & (divisor - 1)) == 0)
15568 return value;
15570 overflow_p = TREE_OVERFLOW (value);
15571 high = TREE_INT_CST_HIGH (value);
15572 low &= ~(divisor - 1);
15573 low += divisor;
15574 if (low == 0)
15576 high++;
15577 if (high == 0)
15578 overflow_p = true;
15581 return force_fit_type_double (TREE_TYPE (value), low, high,
15582 -1, overflow_p);
15584 else
15586 tree t;
15588 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15589 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15590 t = build_int_cst (TREE_TYPE (value), -divisor);
15591 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15594 else
15596 if (!div)
15597 div = build_int_cst (TREE_TYPE (value), divisor);
15598 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15599 value = size_binop_loc (loc, MULT_EXPR, value, div);
15602 return value;
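/* Editorial sketch (not part of the original source, standalone C): the
   power-of-two path above is the classic mask trick,
   (value + divisor - 1) & -divisor.  For divisor = 8:

     unsigned long
     round_up_8 (unsigned long v)
     {
       return (v + 7) & ~7UL;   // -8 == ~7 in two's complement
     }

   round_up_8 (13) == 16 and round_up_8 (16) == 16; negating a power of two
   yields a mask whose low log2(divisor) bits are clear, which is exactly
   what the BIT_AND_EXPR in the non-constant branch relies on.  */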
15605 /* Likewise, but round down. */
15607 tree
15608 round_down_loc (location_t loc, tree value, int divisor)
15610 tree div = NULL_TREE;
15612 gcc_assert (divisor > 0);
15613 if (divisor == 1)
15614 return value;
15616 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15617 have to do anything. Only do this when we are not given a constant,
15618 because for a constant this check is more expensive than simply
15619 performing the rounding. */
15620 if (TREE_CODE (value) != INTEGER_CST)
15622 div = build_int_cst (TREE_TYPE (value), divisor);
15624 if (multiple_of_p (TREE_TYPE (value), value, div))
15625 return value;
15628 /* If divisor is a power of two, simplify this to bit manipulation. */
15629 if (divisor == (divisor & -divisor))
15631 tree t;
15633 t = build_int_cst (TREE_TYPE (value), -divisor);
15634 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15636 else
15638 if (!div)
15639 div = build_int_cst (TREE_TYPE (value), divisor);
15640 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15641 value = size_binop_loc (loc, MULT_EXPR, value, div);
15644 return value;
15647 /* Return a pointer to the base of the object addressed by EXP and
15648 extract information about the offset of the access, storing it
15649 in *PBITPOS and *POFFSET. */
15651 static tree
15652 split_address_to_core_and_offset (tree exp,
15653 HOST_WIDE_INT *pbitpos, tree *poffset)
15655 tree core;
15656 enum machine_mode mode;
15657 int unsignedp, volatilep;
15658 HOST_WIDE_INT bitsize;
15659 location_t loc = EXPR_LOCATION (exp);
15661 if (TREE_CODE (exp) == ADDR_EXPR)
15663 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15664 poffset, &mode, &unsignedp, &volatilep,
15665 false);
15666 core = build_fold_addr_expr_loc (loc, core);
15668 else
15670 core = exp;
15671 *pbitpos = 0;
15672 *poffset = NULL_TREE;
15675 return core;
15678 /* Returns true if addresses of E1 and E2 differ by a constant, false
15679 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15681 bool
15682 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15684 tree core1, core2;
15685 HOST_WIDE_INT bitpos1, bitpos2;
15686 tree toffset1, toffset2, tdiff, type;
15688 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15689 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15691 if (bitpos1 % BITS_PER_UNIT != 0
15692 || bitpos2 % BITS_PER_UNIT != 0
15693 || !operand_equal_p (core1, core2, 0))
15694 return false;
15696 if (toffset1 && toffset2)
15698 type = TREE_TYPE (toffset1);
15699 if (type != TREE_TYPE (toffset2))
15700 toffset2 = fold_convert (type, toffset2);
15702 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15703 if (!cst_and_fits_in_hwi (tdiff))
15704 return false;
15706 *diff = int_cst_value (tdiff);
15708 else if (toffset1 || toffset2)
15710 /* If only one of the offsets is non-constant, the difference cannot
15711 be a constant. */
15712 return false;
15714 else
15715 *diff = 0;
15717 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15718 return true;
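/* Editorial example (not part of the original source): for "int a[10];",
   e1 = &a[3] and e2 = &a[1] share the core &a, the two offsets fold to
   constants, and *DIFF becomes 2 * sizeof (int) = 8 on a typical 32-bit
   target.  If only one side has a symbolic offset, or the cores differ,
   the function returns false.  */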
15721 /* Simplify the floating point expression EXP when the sign of the
15722 result is not significant. Return NULL_TREE if no simplification
15723 is possible. */
15725 tree
15726 fold_strip_sign_ops (tree exp)
15728 tree arg0, arg1;
15729 location_t loc = EXPR_LOCATION (exp);
15731 switch (TREE_CODE (exp))
15733 case ABS_EXPR:
15734 case NEGATE_EXPR:
15735 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15736 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15738 case MULT_EXPR:
15739 case RDIV_EXPR:
15740 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15741 return NULL_TREE;
15742 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15743 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15744 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15745 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15746 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15747 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15748 break;
15750 case COMPOUND_EXPR:
15751 arg0 = TREE_OPERAND (exp, 0);
15752 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15753 if (arg1)
15754 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15755 break;
15757 case COND_EXPR:
15758 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15759 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15760 if (arg0 || arg1)
15761 return fold_build3_loc (loc,
15762 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15763 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15764 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15765 break;
15767 case CALL_EXPR:
15769 const enum built_in_function fcode = builtin_mathfn_code (exp);
15770 switch (fcode)
15772 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15773 /* Strip copysign function call, return the 1st argument. */
15774 arg0 = CALL_EXPR_ARG (exp, 0);
15775 arg1 = CALL_EXPR_ARG (exp, 1);
15776 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15778 default:
15779 /* Strip sign ops from the argument of "odd" math functions. */
15780 if (negate_mathfn_p (fcode))
15782 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15783 if (arg0)
15784 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15786 break;
15789 break;
15791 default:
15792 break;
15794 return NULL_TREE;
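/* Editorial example (not part of the original source): a caller that only
   depends on the magnitude of its operand -- folding cos (X), say, since
   cos (-x) == cos (x) -- can pass X through fold_strip_sign_ops to turn
   cos (-y * fabs (z)) into cos (y * z); a NULL_TREE result means no sign
   operation could be removed and X should be used unchanged.  */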