/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
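
/* A worked example of the macro above (illustrative only, using 8-bit
   values for brevity; the macro itself operates on HOST_WIDE_INT):
   with a = 0x7f and b = 0x01, sum = 0x80.  The operands agree in sign
   (both nonnegative), so ~(a ^ b) has the sign bit set; a and sum
   differ in sign, so (a ^ sum) has the sign bit set as well.  Their
   AND therefore compares < 0: overflow.  With a = 0x7f and b = 0xff
   (that is, -1), sum = 0x7e; the operands differ in sign, ~(a ^ b)
   has a clear sign bit, and no overflow is reported.  */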
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
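
/* A minimal usage sketch of the pair above (illustrative only, assuming
   a 64-bit HOST_WIDE_INT so that BASE == 2^32):

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (words, 0x123456789abcdef0, 0x0fedcba987654321);
     decode (words, &lo, &hi);

   afterwards words[] holds the four 32-bit half-words, least significant
   first, and lo and hi hold the original value again; encode and decode
   are exact inverses of each other.  */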
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
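
/* Usage sketch (illustrative, not part of the original file; assumes a
   64-bit HOST_WIDE_INT): fitting l1 = 0x100000000, h1 = 0 to a 32-bit
   signed type clears every bit above bit 31, leaving zero, which differs
   from the input, so the function stores 0 and returns nonzero to signal
   overflow.  Fitting l1 = 0xffffffff, h1 = 0 to the same type keeps the
   low 32 bits and then sign extends them, yielding the double-word value
   -1; that also differs from the argument, so it too reports overflow.  */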
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
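
/* The carry propagation above relies on unsigned wraparound: l = l1 + l2
   wraps modulo 2^HOST_BITS_PER_WIDE_INT, and the sum wrapped exactly when
   the result is smaller than an operand, so (l < l1) is the carry into
   the high word.  For example, with 64-bit words, l1 = 0xffffffffffffffff
   and l2 = 1 give l = 0 and l < l1, so 1 is carried into h.  */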
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
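
/* Both branches above compute the two's complement identity -x == ~x + 1
   on the double-word value: when the low word is zero, the +1 carries all
   the way through it, so the result is (0, -h1); otherwise the carry
   stops in the low word, giving (-l1, ~h1).  The only signed overflow
   case is negating the most negative value, for which *hv and h1 are
   both negative, and that is what (*hv & h1) < 0 detects.  */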
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
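
/* In both shift routines above, the bits moved between the two words are
   extracted with a double shift such as
   h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1 rather than a single
   shift by (HOST_BITS_PER_WIDE_INT - count): when COUNT is 0 the latter
   would shift by the full word width, which is undefined behavior in C.
   Splitting it into two smaller shifts keeps every shift count strictly
   below the word size while producing the same result.  */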
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
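
/* Rotation is composed from two logical shifts: the bits shifted out of
   one end re-enter at the other.  For example (illustrative only), for
   PREC = 8, rotating 10010110 left by 3 combines the left shift
   10110000 with the right shift by 5, 00000100, giving 10110100.  The
   arith argument is 0 so both component shifts are logical.  */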
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
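
/* A quick illustration of the rounding cases above, for num = -7 and
   den = 3 (signed): the core division computes the truncated quotient
   -2 with remainder -1.  TRUNC_DIV_EXPR keeps -2.  FLOOR_DIV_EXPR sees
   a negative quotient with a nonzero remainder and adjusts to -3.
   CEIL_DIV_EXPR only adjusts positive quotients, so -2 stands.
   ROUND_DIV_EXPR compares 2 * abs (rem) == 2 against abs (den) == 3;
   since it is not larger, the quotient stays -2, the nearer integer.  */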
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
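
/* A typical caller pairs the functions above like this (illustrative
   sketch only; real callers, such as the loop-iteration estimation code
   mentioned earlier, live elsewhere in the compiler):

     fold_defer_overflow_warnings ();
     t = fold (expr);
     ...decide whether the folded result will actually be used...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   so that a "signed overflow is undefined" warning is only issued when
   the result of the fold is really used.  USED_P and STMT here are
   hypothetical caller-side names.  */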
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }

  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
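
/* For example, with a 32-bit signed type the only value the function
   above rejects is 0x80000000 (INT_MIN): after masking, VAL equals
   1 << 31, the exact bit pattern whose negation overflows because
   +2147483648 is not representable in the type.  Every other value
   compares unequal and may be negated safely.  */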
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
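
/* For instance, for (x + 5) in a signed type with undefined overflow,
   the PLUS_EXPR case above returns true: the operand 5 can be negated
   without overflow, so fold_negate_expr below can rebuild -(x + 5) as
   (-5) - x without introducing an explicit NEGATE_EXPR node.  */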
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
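
/* Decomposition examples for the function above: splitting
   IN = (a + b) + 7 with CODE == PLUS_EXPR stores the literal 7 in
   *LITP, leaves *CONP null, and returns the variable part (a + b).
   Splitting IN = x - 5 with CODE == PLUS_EXPR instead routes the
   subtracted literal to *MINUS_LITP and returns x.  */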
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
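
/* Usage sketch (illustrative): folding the constant expression 2 + 3
   reaches int_const_binop (PLUS_EXPR, arg1, arg2, 0) with two
   INTEGER_CST operands; add_double computes low = 5 with no overflow,
   and force_fit_type_double returns the shared node for 5.  With
   NOTRUNC nonzero the result is instead built directly with
   build_int_cst_wide, skipping the truncation to the type's precision
   and only propagating overflow flags, for callers that want the raw
   double-word value.  */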
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
1948 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1949 indicates which particular sizetype to create. */
1951 tree
1952 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1954 return build_int_cst (sizetype_tab[(int) kind], number);
1957 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1958 is a tree code. The type of the result is taken from the operands.
1959 Both must be equivalent integer types, as checked by int_binop_types_match_p.
1960 If the operands are constant, so is the result. */
1962 tree
1963 size_binop (enum tree_code code, tree arg0, tree arg1)
1965 tree type = TREE_TYPE (arg0);
1967 if (arg0 == error_mark_node || arg1 == error_mark_node)
1968 return error_mark_node;
1970 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1971 TREE_TYPE (arg1)));
1973 /* Handle the special case of two integer constants faster. */
1974 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1976 /* And some specific cases even faster than that. */
1977 if (code == PLUS_EXPR)
1979 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1980 return arg1;
1981 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1982 return arg0;
1984 else if (code == MINUS_EXPR)
1986 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1987 return arg0;
1989 else if (code == MULT_EXPR)
1991 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1992 return arg1;
1995 /* Handle general case of two integer constants. */
1996 return int_const_binop (code, arg0, arg1, 0);
1999 return fold_build2 (code, type, arg0, arg1);
2002 /* Given two values, either both of sizetype or both of bitsizetype,
2003 compute the difference between the two values. Return the value
2004 in the signed type corresponding to the type of the operands. */
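/* For example, given sizetype constants 4 and 12, the result is
   -(ssizetype) (12 - 4) == -8: the smaller constant is always subtracted
   from the larger one, so the unsigned subtraction cannot wrap.  */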
2006 tree
2007 size_diffop (tree arg0, tree arg1)
2009 tree type = TREE_TYPE (arg0);
2010 tree ctype;
2012 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2013 TREE_TYPE (arg1)));
2015 /* If the type is already signed, just do the simple thing. */
2016 if (!TYPE_UNSIGNED (type))
2017 return size_binop (MINUS_EXPR, arg0, arg1);
2019 if (type == sizetype)
2020 ctype = ssizetype;
2021 else if (type == bitsizetype)
2022 ctype = sbitsizetype;
2023 else
2024 ctype = lang_hooks.types.signed_type (type);
2026 /* If either operand is not a constant, do the conversions to the signed
2027 type and subtract. The hardware will do the right thing with any
2028 overflow in the subtraction. */
2029 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2030 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2031 fold_convert (ctype, arg1));
2033 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2034 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2035 overflow) and negate (which can't either). Special-case a result
2036 of zero while we're here. */
2037 if (tree_int_cst_equal (arg0, arg1))
2038 return build_int_cst (ctype, 0);
2039 else if (tree_int_cst_lt (arg1, arg0))
2040 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2041 else
2042 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2043 fold_convert (ctype, size_binop (MINUS_EXPR,
2044 arg1, arg0)));
2047 /* A subroutine of fold_convert_const handling conversions of an
2048 INTEGER_CST to another integer type. */
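/* Note on the overflow computation below: the result is marked as having
   overflowed when the source constant was already marked, or when its sign
   bit is set while converting from an unsigned type to a signed one (a
   large unsigned value that reads as negative); for pointer sources the
   check is suppressed via the OVERFLOWABLE argument.  */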
2050 static tree
2051 fold_convert_const_int_from_int (tree type, tree arg1)
2053 tree t;
2055 /* Given an integer constant, make a new constant with the new type,
2056 appropriately sign-extended or truncated. */
2057 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2058 TREE_INT_CST_HIGH (arg1),
2059 /* Don't set the overflow when
2060 converting a pointer. */
2061 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2062 (TREE_INT_CST_HIGH (arg1) < 0
2063 && (TYPE_UNSIGNED (type)
2064 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2065 | TREE_OVERFLOW (arg1));
2067 return t;
2070 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2071 to an integer type. */
2073 static tree
2074 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2076 int overflow = 0;
2077 tree t;
2079 /* The following code implements the floating point to integer
2080 conversion rules required by the Java Language Specification,
2081 that IEEE NaNs are mapped to zero and values that overflow
2082 the target precision saturate, i.e. values greater than
2083 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2084 are mapped to INT_MIN. These semantics are allowed by the
2085 C and C++ standards that simply state that the behavior of
2086 FP-to-integer conversion is unspecified upon overflow. */
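/* An illustrative consequence, assuming a target with 32-bit int: under
   these rules (int) 1.0e30 folds to INT_MAX, (int) -1.0e30 folds to
   INT_MIN, and (int) __builtin_nan ("") folds to 0, each with
   TREE_OVERFLOW set on the result by the code below.  */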
2088 HOST_WIDE_INT high, low;
2089 REAL_VALUE_TYPE r;
2090 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2092 switch (code)
2094 case FIX_TRUNC_EXPR:
2095 real_trunc (&r, VOIDmode, &x);
2096 break;
2098 default:
2099 gcc_unreachable ();
2102 /* If R is NaN, return zero and show we have an overflow. */
2103 if (REAL_VALUE_ISNAN (r))
2105 overflow = 1;
2106 high = 0;
2107 low = 0;
2110 /* See if R is less than the lower bound or greater than the
2111 upper bound. */
2113 if (! overflow)
2115 tree lt = TYPE_MIN_VALUE (type);
2116 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2117 if (REAL_VALUES_LESS (r, l))
2119 overflow = 1;
2120 high = TREE_INT_CST_HIGH (lt);
2121 low = TREE_INT_CST_LOW (lt);
2125 if (! overflow)
2127 tree ut = TYPE_MAX_VALUE (type);
2128 if (ut)
2130 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2131 if (REAL_VALUES_LESS (u, r))
2133 overflow = 1;
2134 high = TREE_INT_CST_HIGH (ut);
2135 low = TREE_INT_CST_LOW (ut);
2140 if (! overflow)
2141 REAL_VALUE_TO_INT (&low, &high, r);
2143 t = force_fit_type_double (type, low, high, -1,
2144 overflow | TREE_OVERFLOW (arg1));
2145 return t;
2148 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2149 to another floating point type. */
2151 static tree
2152 fold_convert_const_real_from_real (tree type, tree arg1)
2154 REAL_VALUE_TYPE value;
2155 tree t;
2157 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2158 t = build_real (type, value);
2160 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2161 return t;
2164 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2165 type TYPE. If no simplification can be done return NULL_TREE. */
2167 static tree
2168 fold_convert_const (enum tree_code code, tree type, tree arg1)
2170 if (TREE_TYPE (arg1) == type)
2171 return arg1;
2173 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2175 if (TREE_CODE (arg1) == INTEGER_CST)
2176 return fold_convert_const_int_from_int (type, arg1);
2177 else if (TREE_CODE (arg1) == REAL_CST)
2178 return fold_convert_const_int_from_real (code, type, arg1);
2180 else if (TREE_CODE (type) == REAL_TYPE)
2182 if (TREE_CODE (arg1) == INTEGER_CST)
2183 return build_real_from_int_cst (type, arg1);
2184 if (TREE_CODE (arg1) == REAL_CST)
2185 return fold_convert_const_real_from_real (type, arg1);
2187 return NULL_TREE;
2190 /* Construct a vector of zero elements of vector type TYPE. */
2192 static tree
2193 build_zero_vector (tree type)
2195 tree elem, list;
2196 int i, units;
2198 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2199 units = TYPE_VECTOR_SUBPARTS (type);
2201 list = NULL_TREE;
2202 for (i = 0; i < units; i++)
2203 list = tree_cons (NULL_TREE, elem, list);
2204 return build_vector (type, list);
2207 /* Convert expression ARG to type TYPE. Used by the middle-end for
2208 simple conversions in preference to calling the front-end's convert. */
2210 tree
2211 fold_convert (tree type, tree arg)
2213 tree orig = TREE_TYPE (arg);
2214 tree tem;
2216 if (type == orig)
2217 return arg;
2219 if (TREE_CODE (arg) == ERROR_MARK
2220 || TREE_CODE (type) == ERROR_MARK
2221 || TREE_CODE (orig) == ERROR_MARK)
2222 return error_mark_node;
2224 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2225 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2226 TYPE_MAIN_VARIANT (orig)))
2227 return fold_build1 (NOP_EXPR, type, arg);
2229 switch (TREE_CODE (type))
2231 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2232 case POINTER_TYPE: case REFERENCE_TYPE:
2233 case OFFSET_TYPE:
2234 if (TREE_CODE (arg) == INTEGER_CST)
2236 tem = fold_convert_const (NOP_EXPR, type, arg);
2237 if (tem != NULL_TREE)
2238 return tem;
2240 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2241 || TREE_CODE (orig) == OFFSET_TYPE)
2242 return fold_build1 (NOP_EXPR, type, arg);
2243 if (TREE_CODE (orig) == COMPLEX_TYPE)
2245 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2246 return fold_convert (type, tem);
2248 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2249 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2250 return fold_build1 (NOP_EXPR, type, arg);
2252 case REAL_TYPE:
2253 if (TREE_CODE (arg) == INTEGER_CST)
2255 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2256 if (tem != NULL_TREE)
2257 return tem;
2259 else if (TREE_CODE (arg) == REAL_CST)
2261 tem = fold_convert_const (NOP_EXPR, type, arg);
2262 if (tem != NULL_TREE)
2263 return tem;
2266 switch (TREE_CODE (orig))
2268 case INTEGER_TYPE:
2269 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2270 case POINTER_TYPE: case REFERENCE_TYPE:
2271 return fold_build1 (FLOAT_EXPR, type, arg);
2273 case REAL_TYPE:
2274 return fold_build1 (NOP_EXPR, type, arg);
2276 case COMPLEX_TYPE:
2277 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2278 return fold_convert (type, tem);
2280 default:
2281 gcc_unreachable ();
2284 case COMPLEX_TYPE:
2285 switch (TREE_CODE (orig))
2287 case INTEGER_TYPE:
2288 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2289 case POINTER_TYPE: case REFERENCE_TYPE:
2290 case REAL_TYPE:
2291 return build2 (COMPLEX_EXPR, type,
2292 fold_convert (TREE_TYPE (type), arg),
2293 fold_convert (TREE_TYPE (type), integer_zero_node));
2294 case COMPLEX_TYPE:
2296 tree rpart, ipart;
2298 if (TREE_CODE (arg) == COMPLEX_EXPR)
2300 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2301 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2302 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
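/* Otherwise ARG is referenced twice below, once for each part, so wrap
   it in a SAVE_EXPR to avoid evaluating any side effects twice.  */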
2305 arg = save_expr (arg);
2306 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2307 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2308 rpart = fold_convert (TREE_TYPE (type), rpart);
2309 ipart = fold_convert (TREE_TYPE (type), ipart);
2310 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2313 default:
2314 gcc_unreachable ();
2317 case VECTOR_TYPE:
2318 if (integer_zerop (arg))
2319 return build_zero_vector (type);
2320 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2321 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2322 || TREE_CODE (orig) == VECTOR_TYPE);
2323 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2325 case VOID_TYPE:
2326 tem = fold_ignored_result (arg);
2327 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2328 return tem;
2329 return fold_build1 (NOP_EXPR, type, tem);
2331 default:
2332 gcc_unreachable ();
2336 /* Return false if expr can be assumed not to be an lvalue, true
2337 otherwise. */
2339 static bool
2340 maybe_lvalue_p (tree x)
2342 /* We only need to wrap lvalue tree codes. */
2343 switch (TREE_CODE (x))
2345 case VAR_DECL:
2346 case PARM_DECL:
2347 case RESULT_DECL:
2348 case LABEL_DECL:
2349 case FUNCTION_DECL:
2350 case SSA_NAME:
2352 case COMPONENT_REF:
2353 case INDIRECT_REF:
2354 case ALIGN_INDIRECT_REF:
2355 case MISALIGNED_INDIRECT_REF:
2356 case ARRAY_REF:
2357 case ARRAY_RANGE_REF:
2358 case BIT_FIELD_REF:
2359 case OBJ_TYPE_REF:
2361 case REALPART_EXPR:
2362 case IMAGPART_EXPR:
2363 case PREINCREMENT_EXPR:
2364 case PREDECREMENT_EXPR:
2365 case SAVE_EXPR:
2366 case TRY_CATCH_EXPR:
2367 case WITH_CLEANUP_EXPR:
2368 case COMPOUND_EXPR:
2369 case MODIFY_EXPR:
2370 case GIMPLE_MODIFY_STMT:
2371 case TARGET_EXPR:
2372 case COND_EXPR:
2373 case BIND_EXPR:
2374 case MIN_EXPR:
2375 case MAX_EXPR:
2376 break;
2378 default:
2379 /* Assume the worst for front-end tree codes. */
2380 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2381 break;
2382 return false;
2385 return true;
2388 /* Return an expr equal to X but certainly not valid as an lvalue. */
2390 tree
2391 non_lvalue (tree x)
2393 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2394 us. */
2395 if (in_gimple_form)
2396 return x;
2398 if (! maybe_lvalue_p (x))
2399 return x;
2400 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2403 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2404 Zero means allow extended lvalues. */
2406 int pedantic_lvalues;
2408 /* When pedantic, return an expr equal to X but certainly not valid as a
2409 pedantic lvalue. Otherwise, return X. */
2411 static tree
2412 pedantic_non_lvalue (tree x)
2414 if (pedantic_lvalues)
2415 return non_lvalue (x);
2416 else
2417 return x;
2420 /* Given a tree comparison code, return the code that is the logical inverse
2421 of the given code. It is not safe to do this for floating-point
2422 comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS flag
2423 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2425 enum tree_code
2426 invert_tree_comparison (enum tree_code code, bool honor_nans)
2428 if (honor_nans && flag_trapping_math)
2429 return ERROR_MARK;
2431 switch (code)
2433 case EQ_EXPR:
2434 return NE_EXPR;
2435 case NE_EXPR:
2436 return EQ_EXPR;
2437 case GT_EXPR:
2438 return honor_nans ? UNLE_EXPR : LE_EXPR;
2439 case GE_EXPR:
2440 return honor_nans ? UNLT_EXPR : LT_EXPR;
2441 case LT_EXPR:
2442 return honor_nans ? UNGE_EXPR : GE_EXPR;
2443 case LE_EXPR:
2444 return honor_nans ? UNGT_EXPR : GT_EXPR;
2445 case LTGT_EXPR:
2446 return UNEQ_EXPR;
2447 case UNEQ_EXPR:
2448 return LTGT_EXPR;
2449 case UNGT_EXPR:
2450 return LE_EXPR;
2451 case UNGE_EXPR:
2452 return LT_EXPR;
2453 case UNLT_EXPR:
2454 return GE_EXPR;
2455 case UNLE_EXPR:
2456 return GT_EXPR;
2457 case ORDERED_EXPR:
2458 return UNORDERED_EXPR;
2459 case UNORDERED_EXPR:
2460 return ORDERED_EXPR;
2461 default:
2462 gcc_unreachable ();
2466 /* Similar, but return the comparison that results if the operands are
2467 swapped. This is safe for floating-point. */
2469 enum tree_code
2470 swap_tree_comparison (enum tree_code code)
2472 switch (code)
2474 case EQ_EXPR:
2475 case NE_EXPR:
2476 case ORDERED_EXPR:
2477 case UNORDERED_EXPR:
2478 case LTGT_EXPR:
2479 case UNEQ_EXPR:
2480 return code;
2481 case GT_EXPR:
2482 return LT_EXPR;
2483 case GE_EXPR:
2484 return LE_EXPR;
2485 case LT_EXPR:
2486 return GT_EXPR;
2487 case LE_EXPR:
2488 return GE_EXPR;
2489 case UNGT_EXPR:
2490 return UNLT_EXPR;
2491 case UNGE_EXPR:
2492 return UNLE_EXPR;
2493 case UNLT_EXPR:
2494 return UNGT_EXPR;
2495 case UNLE_EXPR:
2496 return UNGE_EXPR;
2497 default:
2498 gcc_unreachable ();
2503 /* Convert a comparison tree code from an enum tree_code representation
2504 into a compcode bit-based encoding. This function is the inverse of
2505 compcode_to_comparison. */
2507 static enum comparison_code
2508 comparison_to_compcode (enum tree_code code)
2510 switch (code)
2512 case LT_EXPR:
2513 return COMPCODE_LT;
2514 case EQ_EXPR:
2515 return COMPCODE_EQ;
2516 case LE_EXPR:
2517 return COMPCODE_LE;
2518 case GT_EXPR:
2519 return COMPCODE_GT;
2520 case NE_EXPR:
2521 return COMPCODE_NE;
2522 case GE_EXPR:
2523 return COMPCODE_GE;
2524 case ORDERED_EXPR:
2525 return COMPCODE_ORD;
2526 case UNORDERED_EXPR:
2527 return COMPCODE_UNORD;
2528 case UNLT_EXPR:
2529 return COMPCODE_UNLT;
2530 case UNEQ_EXPR:
2531 return COMPCODE_UNEQ;
2532 case UNLE_EXPR:
2533 return COMPCODE_UNLE;
2534 case UNGT_EXPR:
2535 return COMPCODE_UNGT;
2536 case LTGT_EXPR:
2537 return COMPCODE_LTGT;
2538 case UNGE_EXPR:
2539 return COMPCODE_UNGE;
2540 default:
2541 gcc_unreachable ();
2545 /* Convert a compcode bit-based encoding of a comparison operator back
2546 to GCC's enum tree_code representation. This function is the
2547 inverse of comparison_to_compcode. */
2549 static enum tree_code
2550 compcode_to_comparison (enum comparison_code code)
2552 switch (code)
2554 case COMPCODE_LT:
2555 return LT_EXPR;
2556 case COMPCODE_EQ:
2557 return EQ_EXPR;
2558 case COMPCODE_LE:
2559 return LE_EXPR;
2560 case COMPCODE_GT:
2561 return GT_EXPR;
2562 case COMPCODE_NE:
2563 return NE_EXPR;
2564 case COMPCODE_GE:
2565 return GE_EXPR;
2566 case COMPCODE_ORD:
2567 return ORDERED_EXPR;
2568 case COMPCODE_UNORD:
2569 return UNORDERED_EXPR;
2570 case COMPCODE_UNLT:
2571 return UNLT_EXPR;
2572 case COMPCODE_UNEQ:
2573 return UNEQ_EXPR;
2574 case COMPCODE_UNLE:
2575 return UNLE_EXPR;
2576 case COMPCODE_UNGT:
2577 return UNGT_EXPR;
2578 case COMPCODE_LTGT:
2579 return LTGT_EXPR;
2580 case COMPCODE_UNGE:
2581 return UNGE_EXPR;
2582 default:
2583 gcc_unreachable ();
2587 /* Return a tree for the comparison which is the combination of
2588 doing the AND or OR (depending on CODE) of the two operations LCODE
2589 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2590 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2591 if this makes the transformation invalid. */
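/* For example, combining (x < y) || (x == y) ORs the LT and EQ compcodes,
   which yields the LE compcode, so the whole test can fold to x <= y when
   NaN and trap considerations permit it.  */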
2593 tree
2594 combine_comparisons (enum tree_code code, enum tree_code lcode,
2595 enum tree_code rcode, tree truth_type,
2596 tree ll_arg, tree lr_arg)
2598 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2599 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2600 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2601 enum comparison_code compcode;
2603 switch (code)
2605 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2606 compcode = lcompcode & rcompcode;
2607 break;
2609 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2610 compcode = lcompcode | rcompcode;
2611 break;
2613 default:
2614 return NULL_TREE;
2617 if (!honor_nans)
2619 /* Eliminate unordered comparisons, as well as LTGT and ORD
2620 which are not used unless the mode has NaNs. */
2621 compcode &= ~COMPCODE_UNORD;
2622 if (compcode == COMPCODE_LTGT)
2623 compcode = COMPCODE_NE;
2624 else if (compcode == COMPCODE_ORD)
2625 compcode = COMPCODE_TRUE;
2627 else if (flag_trapping_math)
2629 /* Check that the original operation and the optimized ones will trap
2630 under the same condition. */
2631 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2632 && (lcompcode != COMPCODE_EQ)
2633 && (lcompcode != COMPCODE_ORD);
2634 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2635 && (rcompcode != COMPCODE_EQ)
2636 && (rcompcode != COMPCODE_ORD);
2637 bool trap = (compcode & COMPCODE_UNORD) == 0
2638 && (compcode != COMPCODE_EQ)
2639 && (compcode != COMPCODE_ORD);
2641 /* In a short-circuited boolean expression the LHS might be
2642 such that the RHS, if evaluated, will never trap. For
2643 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2644 if neither x nor y is NaN. (This is a mixed blessing: for
2645 example, the expression above will never trap, hence
2646 optimizing it to x < y would be invalid). */
2647 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2648 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2649 rtrap = false;
2651 /* If the comparison was short-circuited, and only the RHS
2652 trapped, we may now generate a spurious trap. */
2653 if (rtrap && !ltrap
2654 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2655 return NULL_TREE;
2657 /* If we changed the conditions that cause a trap, we lose. */
2658 if ((ltrap || rtrap) != trap)
2659 return NULL_TREE;
2662 if (compcode == COMPCODE_TRUE)
2663 return constant_boolean_node (true, truth_type);
2664 else if (compcode == COMPCODE_FALSE)
2665 return constant_boolean_node (false, truth_type);
2666 else
2667 return fold_build2 (compcode_to_comparison (compcode),
2668 truth_type, ll_arg, lr_arg);
2671 /* Return nonzero if CODE is a tree code that represents a truth value. */
2673 static int
2674 truth_value_p (enum tree_code code)
2676 return (TREE_CODE_CLASS (code) == tcc_comparison
2677 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2678 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2679 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2682 /* Return nonzero if two operands (typically of the same tree node)
2683 are necessarily equal. If either argument has side-effects this
2684 function returns zero. FLAGS modifies behavior as follows:
2686 If OEP_ONLY_CONST is set, only return nonzero for constants.
2687 This function tests whether the operands are indistinguishable;
2688 it does not test whether they are equal using C's == operation.
2689 The distinction is important for IEEE floating point, because
2690 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2691 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2693 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2694 even though it may hold multiple values during a function.
2695 This is because a GCC tree node guarantees that nothing else is
2696 executed between the evaluation of its "operands" (which may often
2697 be evaluated in arbitrary order). Hence if the operands themselves
2698 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2699 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2700 unset means assuming isochronic (or instantaneous) tree equivalence.
2701 Unless comparing arbitrary expression trees, such as from different
2702 statements, this flag can usually be left unset.
2704 If OEP_PURE_SAME is set, then pure functions with identical arguments
2705 are considered the same. It is used when the caller has other ways
2706 to ensure that global memory is unchanged in between. */
2709 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2711 /* If either is ERROR_MARK, they aren't equal. */
2712 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2713 return 0;
2715 /* If the two types don't have the same signedness, then we can't consider
2716 them equal. We must check this before the STRIP_NOPS calls
2717 because they may change the signedness of the arguments. */
2718 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2719 return 0;
2721 /* If the two types don't have the same precision, then it is not safe
2722 to strip NOPs. */
2723 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2724 return 0;
2726 STRIP_NOPS (arg0);
2727 STRIP_NOPS (arg1);
2729 /* In case both args are comparisons but with different comparison
2730 code, try to swap the comparison operands of one arg to produce
2731 a match and compare that variant. */
2732 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2733 && COMPARISON_CLASS_P (arg0)
2734 && COMPARISON_CLASS_P (arg1))
2736 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2738 if (TREE_CODE (arg0) == swap_code)
2739 return operand_equal_p (TREE_OPERAND (arg0, 0),
2740 TREE_OPERAND (arg1, 1), flags)
2741 && operand_equal_p (TREE_OPERAND (arg0, 1),
2742 TREE_OPERAND (arg1, 0), flags);
2745 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2746 /* This is needed for conversions and for COMPONENT_REF.
2747 Might as well play it safe and always test this. */
2748 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2749 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2750 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2751 return 0;
2753 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2754 We don't care about side effects in that case because the SAVE_EXPR
2755 takes care of that for us. In all other cases, two expressions are
2756 equal if they have no side effects. If we have two identical
2757 expressions with side effects that should be treated the same due
2758 to the only side effects being identical SAVE_EXPR's, that will
2759 be detected in the recursive calls below. */
2760 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2761 && (TREE_CODE (arg0) == SAVE_EXPR
2762 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2763 return 1;
2765 /* Next handle constant cases, those for which we can return 1 even
2766 if ONLY_CONST is set. */
2767 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2768 switch (TREE_CODE (arg0))
2770 case INTEGER_CST:
2771 return tree_int_cst_equal (arg0, arg1);
2773 case REAL_CST:
2774 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2775 TREE_REAL_CST (arg1)))
2776 return 1;
2779 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2781 /* If we do not distinguish between signed and unsigned zero,
2782 consider them equal. */
2783 if (real_zerop (arg0) && real_zerop (arg1))
2784 return 1;
2786 return 0;
2788 case VECTOR_CST:
2790 tree v1, v2;
2792 v1 = TREE_VECTOR_CST_ELTS (arg0);
2793 v2 = TREE_VECTOR_CST_ELTS (arg1);
2794 while (v1 && v2)
2796 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2797 flags))
2798 return 0;
2799 v1 = TREE_CHAIN (v1);
2800 v2 = TREE_CHAIN (v2);
2803 return v1 == v2;
2806 case COMPLEX_CST:
2807 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2808 flags)
2809 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2810 flags));
2812 case STRING_CST:
2813 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2814 && ! memcmp (TREE_STRING_POINTER (arg0),
2815 TREE_STRING_POINTER (arg1),
2816 TREE_STRING_LENGTH (arg0)));
2818 case ADDR_EXPR:
2819 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2820 0);
2821 default:
2822 break;
2825 if (flags & OEP_ONLY_CONST)
2826 return 0;
2828 /* Define macros to test an operand from arg0 and arg1 for equality and a
2829 variant that allows null and views null as being different from any
2830 non-null value. In the latter case, if either is null, they both
2831 must be; otherwise, do the normal comparison. */
2832 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2833 TREE_OPERAND (arg1, N), flags)
2835 #define OP_SAME_WITH_NULL(N) \
2836 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2837 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2839 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2841 case tcc_unary:
2842 /* Two conversions are equal only if signedness and modes match. */
2843 switch (TREE_CODE (arg0))
2845 case NOP_EXPR:
2846 case CONVERT_EXPR:
2847 case FIX_TRUNC_EXPR:
2848 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2849 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2850 return 0;
2851 break;
2852 default:
2853 break;
2856 return OP_SAME (0);
2859 case tcc_comparison:
2860 case tcc_binary:
2861 if (OP_SAME (0) && OP_SAME (1))
2862 return 1;
2864 /* For commutative ops, allow the other order. */
2865 return (commutative_tree_code (TREE_CODE (arg0))
2866 && operand_equal_p (TREE_OPERAND (arg0, 0),
2867 TREE_OPERAND (arg1, 1), flags)
2868 && operand_equal_p (TREE_OPERAND (arg0, 1),
2869 TREE_OPERAND (arg1, 0), flags));
2871 case tcc_reference:
2872 /* If either of the pointer (or reference) expressions we are
2873 dereferencing contains a side effect, these cannot be equal. */
2874 if (TREE_SIDE_EFFECTS (arg0)
2875 || TREE_SIDE_EFFECTS (arg1))
2876 return 0;
2878 switch (TREE_CODE (arg0))
2880 case INDIRECT_REF:
2881 case ALIGN_INDIRECT_REF:
2882 case MISALIGNED_INDIRECT_REF:
2883 case REALPART_EXPR:
2884 case IMAGPART_EXPR:
2885 return OP_SAME (0);
2887 case ARRAY_REF:
2888 case ARRAY_RANGE_REF:
2889 /* Operands 2 and 3 may be null. */
2890 return (OP_SAME (0)
2891 && OP_SAME (1)
2892 && OP_SAME_WITH_NULL (2)
2893 && OP_SAME_WITH_NULL (3));
2895 case COMPONENT_REF:
2896 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2897 may be NULL when we're called to compare MEM_EXPRs. */
2898 return OP_SAME_WITH_NULL (0)
2899 && OP_SAME (1)
2900 && OP_SAME_WITH_NULL (2);
2902 case BIT_FIELD_REF:
2903 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2905 default:
2906 return 0;
2909 case tcc_expression:
2910 switch (TREE_CODE (arg0))
2912 case ADDR_EXPR:
2913 case TRUTH_NOT_EXPR:
2914 return OP_SAME (0);
2916 case TRUTH_ANDIF_EXPR:
2917 case TRUTH_ORIF_EXPR:
2918 return OP_SAME (0) && OP_SAME (1);
2920 case TRUTH_AND_EXPR:
2921 case TRUTH_OR_EXPR:
2922 case TRUTH_XOR_EXPR:
2923 if (OP_SAME (0) && OP_SAME (1))
2924 return 1;
2926 /* Otherwise take into account this is a commutative operation. */
2927 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2928 TREE_OPERAND (arg1, 1), flags)
2929 && operand_equal_p (TREE_OPERAND (arg0, 1),
2930 TREE_OPERAND (arg1, 0), flags));
2932 case CALL_EXPR:
2933 /* If the CALL_EXPRs call different functions, then they
2934 clearly cannot be equal. */
2935 if (!OP_SAME (0))
2936 return 0;
2939 unsigned int cef = call_expr_flags (arg0);
2940 if (flags & OEP_PURE_SAME)
2941 cef &= ECF_CONST | ECF_PURE;
2942 else
2943 cef &= ECF_CONST;
2944 if (!cef)
2945 return 0;
2948 /* Now see if all the arguments are the same. operand_equal_p
2949 does not handle TREE_LIST, so we walk the operands here
2950 feeding them to operand_equal_p. */
2951 arg0 = TREE_OPERAND (arg0, 1);
2952 arg1 = TREE_OPERAND (arg1, 1);
2953 while (arg0 && arg1)
2955 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2956 flags))
2957 return 0;
2959 arg0 = TREE_CHAIN (arg0);
2960 arg1 = TREE_CHAIN (arg1);
2963 /* If we get here and both argument lists are exhausted
2964 then the CALL_EXPRs are equal. */
2965 return ! (arg0 || arg1);
2967 default:
2968 return 0;
2971 case tcc_declaration:
2972 /* Consider __builtin_sqrt equal to sqrt. */
2973 return (TREE_CODE (arg0) == FUNCTION_DECL
2974 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2975 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2976 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2978 default:
2979 return 0;
2982 #undef OP_SAME
2983 #undef OP_SAME_WITH_NULL
2986 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2987 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2989 When in doubt, return 0. */
2991 static int
2992 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2994 int unsignedp1, unsignedpo;
2995 tree primarg0, primarg1, primother;
2996 unsigned int correct_width;
2998 if (operand_equal_p (arg0, arg1, 0))
2999 return 1;
3001 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3002 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3003 return 0;
3005 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3006 and see if the inner values are the same. This removes any
3007 signedness comparison, which doesn't matter here. */
3008 primarg0 = arg0, primarg1 = arg1;
3009 STRIP_NOPS (primarg0);
3010 STRIP_NOPS (primarg1);
3011 if (operand_equal_p (primarg0, primarg1, 0))
3012 return 1;
3014 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3015 actual comparison operand, ARG0.
3017 First throw away any conversions to wider types
3018 already present in the operands. */
3020 primarg1 = get_narrower (arg1, &unsignedp1);
3021 primother = get_narrower (other, &unsignedpo);
3023 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3024 if (unsignedp1 == unsignedpo
3025 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3026 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3028 tree type = TREE_TYPE (arg0);
3030 /* Make sure shorter operand is extended the right way
3031 to match the longer operand. */
3032 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
3033 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3035 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3036 return 1;
3039 return 0;
3042 /* See if ARG is an expression that is either a comparison or is performing
3043 arithmetic on comparisons. The comparisons must only be comparing
3044 two different values, which will be stored in *CVAL1 and *CVAL2; if
3045 they are nonzero it means that some operands have already been found.
3046 No variables may be used anywhere else in the expression except in the
3047 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3048 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3050 If this is true, return 1. Otherwise, return zero. */
3052 static int
3053 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3055 enum tree_code code = TREE_CODE (arg);
3056 enum tree_code_class class = TREE_CODE_CLASS (code);
3058 /* We can handle some of the tcc_expression cases here. */
3059 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3060 class = tcc_unary;
3061 else if (class == tcc_expression
3062 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3063 || code == COMPOUND_EXPR))
3064 class = tcc_binary;
3066 else if (class == tcc_expression && code == SAVE_EXPR
3067 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3069 /* If we've already found a CVAL1 or CVAL2, this expression is
3070 too complex to handle. */
3071 if (*cval1 || *cval2)
3072 return 0;
3074 class = tcc_unary;
3075 *save_p = 1;
3078 switch (class)
3080 case tcc_unary:
3081 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3083 case tcc_binary:
3084 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3085 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3086 cval1, cval2, save_p));
3088 case tcc_constant:
3089 return 1;
3091 case tcc_expression:
3092 if (code == COND_EXPR)
3093 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3094 cval1, cval2, save_p)
3095 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3096 cval1, cval2, save_p)
3097 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3098 cval1, cval2, save_p));
3099 return 0;
3101 case tcc_comparison:
3102 /* First see if we can handle the first operand, then the second. For
3103 the second operand, we know *CVAL1 can't be zero. It must be that
3104 one side of the comparison is each of the values; test for the
3105 case where this isn't true by failing if the two operands
3106 are the same. */
3108 if (operand_equal_p (TREE_OPERAND (arg, 0),
3109 TREE_OPERAND (arg, 1), 0))
3110 return 0;
3112 if (*cval1 == 0)
3113 *cval1 = TREE_OPERAND (arg, 0);
3114 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3115 ;
3116 else if (*cval2 == 0)
3117 *cval2 = TREE_OPERAND (arg, 0);
3118 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3119 ;
3120 else
3121 return 0;
3123 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3124 ;
3125 else if (*cval2 == 0)
3126 *cval2 = TREE_OPERAND (arg, 1);
3127 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3128 ;
3129 else
3130 return 0;
3132 return 1;
3134 default:
3135 return 0;
3139 /* ARG is a tree that is known to contain just arithmetic operations and
3140 comparisons. Evaluate the operations in the tree substituting NEW0 for
3141 any occurrence of OLD0 as an operand of a comparison and likewise for
3142 NEW1 and OLD1. */
3144 static tree
3145 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3147 tree type = TREE_TYPE (arg);
3148 enum tree_code code = TREE_CODE (arg);
3149 enum tree_code_class class = TREE_CODE_CLASS (code);
3151 /* We can handle some of the tcc_expression cases here. */
3152 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3153 class = tcc_unary;
3154 else if (class == tcc_expression
3155 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3156 class = tcc_binary;
3158 switch (class)
3160 case tcc_unary:
3161 return fold_build1 (code, type,
3162 eval_subst (TREE_OPERAND (arg, 0),
3163 old0, new0, old1, new1));
3165 case tcc_binary:
3166 return fold_build2 (code, type,
3167 eval_subst (TREE_OPERAND (arg, 0),
3168 old0, new0, old1, new1),
3169 eval_subst (TREE_OPERAND (arg, 1),
3170 old0, new0, old1, new1));
3172 case tcc_expression:
3173 switch (code)
3175 case SAVE_EXPR:
3176 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3178 case COMPOUND_EXPR:
3179 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3181 case COND_EXPR:
3182 return fold_build3 (code, type,
3183 eval_subst (TREE_OPERAND (arg, 0),
3184 old0, new0, old1, new1),
3185 eval_subst (TREE_OPERAND (arg, 1),
3186 old0, new0, old1, new1),
3187 eval_subst (TREE_OPERAND (arg, 2),
3188 old0, new0, old1, new1));
3189 default:
3190 break;
3192 /* Fall through - ??? */
3194 case tcc_comparison:
3196 tree arg0 = TREE_OPERAND (arg, 0);
3197 tree arg1 = TREE_OPERAND (arg, 1);
3199 /* We need to check both for exact equality and tree equality. The
3200 former will be true if the operand has a side-effect. In that
3201 case, we know the operand occurred exactly once. */
3203 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3204 arg0 = new0;
3205 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3206 arg0 = new1;
3208 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3209 arg1 = new0;
3210 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3211 arg1 = new1;
3213 return fold_build2 (code, type, arg0, arg1);
3216 default:
3217 return arg;
3221 /* Return a tree for the case when the result of an expression is RESULT
3222 converted to TYPE and OMITTED was previously an operand of the expression
3223 but is now not needed (e.g., we folded OMITTED * 0).
3225 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3226 the conversion of RESULT to TYPE. */
3228 tree
3229 omit_one_operand (tree type, tree result, tree omitted)
3231 tree t = fold_convert (type, result);
3233 if (TREE_SIDE_EFFECTS (omitted))
3234 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3236 return non_lvalue (t);
3239 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3241 static tree
3242 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3244 tree t = fold_convert (type, result);
3246 if (TREE_SIDE_EFFECTS (omitted))
3247 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3249 return pedantic_non_lvalue (t);
3252 /* Return a tree for the case when the result of an expression is RESULT
3253 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3254 of the expression but are now not needed.
3256 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3257 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3258 evaluated before OMITTED2. Otherwise, if neither has side effects,
3259 just do the conversion of RESULT to TYPE. */
3261 tree
3262 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3264 tree t = fold_convert (type, result);
3266 if (TREE_SIDE_EFFECTS (omitted2))
3267 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3268 if (TREE_SIDE_EFFECTS (omitted1))
3269 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3271 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3275 /* Return a simplified tree node for the truth-negation of ARG. This
3276 never alters ARG itself. We assume that ARG is an operation that
3277 returns a truth value (0 or 1).
3279 FIXME: one would think we would fold the result, but it causes
3280 problems with the dominator optimizer. */
3282 tree
3283 fold_truth_not_expr (tree arg)
3285 tree type = TREE_TYPE (arg);
3286 enum tree_code code = TREE_CODE (arg);
3288 /* If this is a comparison, we can simply invert it, except for
3289 floating-point non-equality comparisons, in which case we just
3290 enclose a TRUTH_NOT_EXPR around what we have. */
3292 if (TREE_CODE_CLASS (code) == tcc_comparison)
3294 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3295 if (FLOAT_TYPE_P (op_type)
3296 && flag_trapping_math
3297 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3298 && code != NE_EXPR && code != EQ_EXPR)
3299 return NULL_TREE;
3300 else
3302 code = invert_tree_comparison (code,
3303 HONOR_NANS (TYPE_MODE (op_type)));
3304 if (code == ERROR_MARK)
3305 return NULL_TREE;
3306 else
3307 return build2 (code, type,
3308 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3312 switch (code)
3314 case INTEGER_CST:
3315 return constant_boolean_node (integer_zerop (arg), type);
3317 case TRUTH_AND_EXPR:
3318 return build2 (TRUTH_OR_EXPR, type,
3319 invert_truthvalue (TREE_OPERAND (arg, 0)),
3320 invert_truthvalue (TREE_OPERAND (arg, 1)));
3322 case TRUTH_OR_EXPR:
3323 return build2 (TRUTH_AND_EXPR, type,
3324 invert_truthvalue (TREE_OPERAND (arg, 0)),
3325 invert_truthvalue (TREE_OPERAND (arg, 1)));
3327 case TRUTH_XOR_EXPR:
3328 /* Here we can invert either operand. We invert the first operand
3329 unless the second operand is a TRUTH_NOT_EXPR in which case our
3330 result is the XOR of the first operand with the inside of the
3331 negation of the second operand. */
3333 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3334 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3335 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3336 else
3337 return build2 (TRUTH_XOR_EXPR, type,
3338 invert_truthvalue (TREE_OPERAND (arg, 0)),
3339 TREE_OPERAND (arg, 1));
3341 case TRUTH_ANDIF_EXPR:
3342 return build2 (TRUTH_ORIF_EXPR, type,
3343 invert_truthvalue (TREE_OPERAND (arg, 0)),
3344 invert_truthvalue (TREE_OPERAND (arg, 1)));
3346 case TRUTH_ORIF_EXPR:
3347 return build2 (TRUTH_ANDIF_EXPR, type,
3348 invert_truthvalue (TREE_OPERAND (arg, 0)),
3349 invert_truthvalue (TREE_OPERAND (arg, 1)));
3351 case TRUTH_NOT_EXPR:
3352 return TREE_OPERAND (arg, 0);
3354 case COND_EXPR:
3356 tree arg1 = TREE_OPERAND (arg, 1);
3357 tree arg2 = TREE_OPERAND (arg, 2);
3358 /* A COND_EXPR may have a throw as one operand, which
3359 then has void type. Just leave void operands
3360 as they are. */
3361 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3362 VOID_TYPE_P (TREE_TYPE (arg1))
3363 ? arg1 : invert_truthvalue (arg1),
3364 VOID_TYPE_P (TREE_TYPE (arg2))
3365 ? arg2 : invert_truthvalue (arg2));
3368 case COMPOUND_EXPR:
3369 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3370 invert_truthvalue (TREE_OPERAND (arg, 1)));
3372 case NON_LVALUE_EXPR:
3373 return invert_truthvalue (TREE_OPERAND (arg, 0));
3375 case NOP_EXPR:
3376 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3377 return build1 (TRUTH_NOT_EXPR, type, arg);
3379 case CONVERT_EXPR:
3380 case FLOAT_EXPR:
3381 return build1 (TREE_CODE (arg), type,
3382 invert_truthvalue (TREE_OPERAND (arg, 0)));
3384 case BIT_AND_EXPR:
3385 if (!integer_onep (TREE_OPERAND (arg, 1)))
3386 break;
3387 return build2 (EQ_EXPR, type, arg,
3388 build_int_cst (type, 0));
3390 case SAVE_EXPR:
3391 return build1 (TRUTH_NOT_EXPR, type, arg);
3393 case CLEANUP_POINT_EXPR:
3394 return build1 (CLEANUP_POINT_EXPR, type,
3395 invert_truthvalue (TREE_OPERAND (arg, 0)));
3397 default:
3398 break;
3401 return NULL_TREE;
3404 /* Return a simplified tree node for the truth-negation of ARG. This
3405 never alters ARG itself. We assume that ARG is an operation that
3406 returns a truth value (0 or 1).
3408 FIXME: one would think we would fold the result, but it causes
3409 problems with the dominator optimizer. */
3411 tree
3412 invert_truthvalue (tree arg)
3414 tree tem;
3416 if (TREE_CODE (arg) == ERROR_MARK)
3417 return arg;
3419 tem = fold_truth_not_expr (arg);
3420 if (!tem)
3421 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3423 return tem;
3426 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3427 operands are another bit-wise operation with a common input. If so,
3428 distribute the bit operations to save an operation and possibly two if
3429 constants are involved. For example, convert
3430 (A | B) & (A | C) into A | (B & C)
3431 Further simplification will occur if B and C are constants.
3433 If this optimization cannot be done, 0 will be returned. */
3435 static tree
3436 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3438 tree common;
3439 tree left, right;
3441 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3442 || TREE_CODE (arg0) == code
3443 || (TREE_CODE (arg0) != BIT_AND_EXPR
3444 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3445 return 0;
3447 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3449 common = TREE_OPERAND (arg0, 0);
3450 left = TREE_OPERAND (arg0, 1);
3451 right = TREE_OPERAND (arg1, 1);
3453 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3455 common = TREE_OPERAND (arg0, 0);
3456 left = TREE_OPERAND (arg0, 1);
3457 right = TREE_OPERAND (arg1, 0);
3459 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3461 common = TREE_OPERAND (arg0, 1);
3462 left = TREE_OPERAND (arg0, 0);
3463 right = TREE_OPERAND (arg1, 1);
3465 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3467 common = TREE_OPERAND (arg0, 1);
3468 left = TREE_OPERAND (arg0, 0);
3469 right = TREE_OPERAND (arg1, 0);
3471 else
3472 return 0;
3474 return fold_build2 (TREE_CODE (arg0), type, common,
3475 fold_build2 (code, type, left, right));
3478 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3479 with code CODE. This optimization is unsafe. */
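/* For example, the second transformation below rewrites x/2.0 + x/4.0 as
   x * (1/2.0 + 1/4.0) == x * 0.75, trading two divisions for a single
   multiplication; the rounding of the intermediate sum is what makes the
   transformation unsafe.  */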
3480 static tree
3481 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3483 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3484 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3486 /* (A / C) +- (B / C) -> (A +- B) / C. */
3487 if (mul0 == mul1
3488 && operand_equal_p (TREE_OPERAND (arg0, 1),
3489 TREE_OPERAND (arg1, 1), 0))
3490 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3491 fold_build2 (code, type,
3492 TREE_OPERAND (arg0, 0),
3493 TREE_OPERAND (arg1, 0)),
3494 TREE_OPERAND (arg0, 1));
3496 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3497 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3498 TREE_OPERAND (arg1, 0), 0)
3499 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3500 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3502 REAL_VALUE_TYPE r0, r1;
3503 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3504 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3505 if (!mul0)
3506 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3507 if (!mul1)
3508 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3509 real_arithmetic (&r0, code, &r0, &r1);
3510 return fold_build2 (MULT_EXPR, type,
3511 TREE_OPERAND (arg0, 0),
3512 build_real (type, r0));
3515 return NULL_TREE;
3518 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3519 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3521 static tree
3522 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3523 int unsignedp)
3525 tree result;
3527 if (bitpos == 0)
3529 tree size = TYPE_SIZE (TREE_TYPE (inner));
3530 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3531 || POINTER_TYPE_P (TREE_TYPE (inner)))
3532 && host_integerp (size, 0)
3533 && tree_low_cst (size, 0) == bitsize)
3534 return fold_convert (type, inner);
3537 result = build3 (BIT_FIELD_REF, type, inner,
3538 size_int (bitsize), bitsize_int (bitpos));
3540 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3542 return result;
3545 /* Optimize a bit-field compare.
3547 There are two cases: First is a compare against a constant and the
3548 second is a comparison of two items where the fields are at the same
3549 bit position relative to the start of a chunk (byte, halfword, word)
3550 large enough to contain it. In these cases we can avoid the shift
3551 implicit in bitfield extractions.
3553 For constants, we emit a compare of the shifted constant with the
3554 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3555 compared. For two fields at the same position, we do the ANDs with the
3556 similar mask and compare the result of the ANDs.
3558 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3559 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3560 are the left and right operands of the comparison, respectively.
3562 If the optimization described above can be done, we return the resulting
3563 tree. Otherwise we return zero. */
3565 static tree
3566 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3567 tree lhs, tree rhs)
3569 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3570 tree type = TREE_TYPE (lhs);
3571 tree signed_type, unsigned_type;
3572 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3573 enum machine_mode lmode, rmode, nmode;
3574 int lunsignedp, runsignedp;
3575 int lvolatilep = 0, rvolatilep = 0;
3576 tree linner, rinner = NULL_TREE;
3577 tree mask;
3578 tree offset;
3580 /* Get all the information about the extractions being done. If the bit size
3581 is the same as the size of the underlying object, we aren't doing an
3582 extraction at all and so can do nothing. We also don't want to
3583 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3584 then will no longer be able to replace it. */
3585 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3586 &lunsignedp, &lvolatilep, false);
3587 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3588 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3589 return 0;
3591 if (!const_p)
3593 /* If this is not a constant, we can only do something if bit positions,
3594 sizes, and signedness are the same. */
3595 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3596 &runsignedp, &rvolatilep, false);
3598 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3599 || lunsignedp != runsignedp || offset != 0
3600 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3601 return 0;
3604 /* See if we can find a mode to refer to this field. We should be able to,
3605 but fail if we can't. */
3606 nmode = get_best_mode (lbitsize, lbitpos,
3607 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3608 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3609 TYPE_ALIGN (TREE_TYPE (rinner))),
3610 word_mode, lvolatilep || rvolatilep);
3611 if (nmode == VOIDmode)
3612 return 0;
3614 /* Set signed and unsigned types of the precision of this mode for the
3615 shifts below. */
3616 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3617 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3619 /* Compute the bit position and size for the new reference and our offset
3620 within it. If the new reference is the same size as the original, we
3621 won't optimize anything, so return zero. */
3622 nbitsize = GET_MODE_BITSIZE (nmode);
3623 nbitpos = lbitpos & ~ (nbitsize - 1);
3624 lbitpos -= nbitpos;
3625 if (nbitsize == lbitsize)
3626 return 0;
3628 if (BYTES_BIG_ENDIAN)
3629 lbitpos = nbitsize - lbitsize - lbitpos;
3631 /* Make the mask to be used against the extracted field. */
3632 mask = build_int_cst_type (unsigned_type, -1);
3633 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3634 mask = const_binop (RSHIFT_EXPR, mask,
3635 size_int (nbitsize - lbitsize - lbitpos), 0);
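/* For instance, with nbitsize == 32, lbitsize == 4 and lbitpos == 8, the
   mask is ((unsigned) -1 << 28) >> 20 == 0x00000f00: lbitsize one bits
   positioned over the field.  */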
3637 if (! const_p)
3638 /* If not comparing with constant, just rework the comparison
3639 and return. */
3640 return fold_build2 (code, compare_type,
3641 fold_build2 (BIT_AND_EXPR, unsigned_type,
3642 make_bit_field_ref (linner,
3643 unsigned_type,
3644 nbitsize, nbitpos,
3645 1),
3646 mask),
3647 fold_build2 (BIT_AND_EXPR, unsigned_type,
3648 make_bit_field_ref (rinner,
3649 unsigned_type,
3650 nbitsize, nbitpos,
3651 1),
3652 mask));
3654 /* Otherwise, we are handling the constant case. See if the constant is too
3655 big for the field. Warn and return a tree for 0 (false) if so. We do
3656 this not only for its own sake, but to avoid having to test for this
3657 error case below. If we didn't, we might generate wrong code.
3659 For unsigned fields, the constant shifted right by the field length should
3660 be all zero. For signed fields, the high-order bits should agree with
3661 the sign bit. */
3663 if (lunsignedp)
3665 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3666 fold_convert (unsigned_type, rhs),
3667 size_int (lbitsize), 0)))
3669 warning (0, "comparison is always %d due to width of bit-field",
3670 code == NE_EXPR);
3671 return constant_boolean_node (code == NE_EXPR, compare_type);
3674 else
3676 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3677 size_int (lbitsize - 1), 0);
3678 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3680 warning (0, "comparison is always %d due to width of bit-field",
3681 code == NE_EXPR);
3682 return constant_boolean_node (code == NE_EXPR, compare_type);
3686 /* Single-bit compares should always be against zero. */
3687 if (lbitsize == 1 && ! integer_zerop (rhs))
3689 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3690 rhs = build_int_cst (type, 0);
3693 /* Make a new bitfield reference, shift the constant over the
3694 appropriate number of bits and mask it with the computed mask
3695 (in case this was a signed field). If we changed it, make a new one. */
3696 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3697 if (lvolatilep)
3699 TREE_SIDE_EFFECTS (lhs) = 1;
3700 TREE_THIS_VOLATILE (lhs) = 1;
3703 rhs = const_binop (BIT_AND_EXPR,
3704 const_binop (LSHIFT_EXPR,
3705 fold_convert (unsigned_type, rhs),
3706 size_int (lbitpos), 0),
3707 mask, 0);
3709 return build2 (code, compare_type,
3710 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3711 rhs);
3714 /* Subroutine for fold_truthop: decode a field reference.
3716 If EXP is a comparison reference, we return the innermost reference.
3718 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3719 set to the starting bit number.
3721 If the innermost field can be completely contained in a mode-sized
3722 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3724 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3725 otherwise it is not changed.
3727 *PUNSIGNEDP is set to the signedness of the field.
3729 *PMASK is set to the mask used. This is either contained in a
3730 BIT_AND_EXPR or derived from the width of the field.
3732 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3734 Return 0 if this is not a component reference or is one that we can't
3735 do anything with. */
3737 static tree
3738 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3739 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3740 int *punsignedp, int *pvolatilep,
3741 tree *pmask, tree *pand_mask)
3743 tree outer_type = 0;
3744 tree and_mask = 0;
3745 tree mask, inner, offset;
3746 tree unsigned_type;
3747 unsigned int precision;
3749 /* All the optimizations using this function assume integer fields.
3750 There are problems with FP fields since the type_for_size call
3751 below can fail for, e.g., XFmode. */
3752 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3753 return 0;
3755 /* We are interested in the bare arrangement of bits, so strip everything
3756 that doesn't affect the machine mode. However, record the type of the
3757 outermost expression if it may matter below. */
3758 if (TREE_CODE (exp) == NOP_EXPR
3759 || TREE_CODE (exp) == CONVERT_EXPR
3760 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3761 outer_type = TREE_TYPE (exp);
3762 STRIP_NOPS (exp);
3764 if (TREE_CODE (exp) == BIT_AND_EXPR)
3766 and_mask = TREE_OPERAND (exp, 1);
3767 exp = TREE_OPERAND (exp, 0);
3768 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3769 if (TREE_CODE (and_mask) != INTEGER_CST)
3770 return 0;
3773 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3774 punsignedp, pvolatilep, false);
3775 if ((inner == exp && and_mask == 0)
3776 || *pbitsize < 0 || offset != 0
3777 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3778 return 0;
3780 /* If the number of bits in the reference is the same as the bitsize of
3781 the outer type, then the outer type gives the signedness. Otherwise
3782 (in case of a small bitfield) the signedness is unchanged. */
3783 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3784 *punsignedp = TYPE_UNSIGNED (outer_type);
3786 /* Compute the mask to access the bitfield. */
3787 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3788 precision = TYPE_PRECISION (unsigned_type);
3790 mask = build_int_cst_type (unsigned_type, -1);
3792 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3793 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
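/* E.g. for precision == 32 and *pbitsize == 8 this computes
   ((unsigned) -1 << 24) >> 24 == 0xff, a mask of *pbitsize low-order
   one bits.  */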
3795 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3796 if (and_mask != 0)
3797 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3798 fold_convert (unsigned_type, and_mask), mask);
3800 *pmask = mask;
3801 *pand_mask = and_mask;
3802 return inner;
3805 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3806 bit positions. */
3808 static int
3809 all_ones_mask_p (tree mask, int size)
3811 tree type = TREE_TYPE (mask);
3812 unsigned int precision = TYPE_PRECISION (type);
3813 tree tmask;
3815 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3817 return
3818 tree_int_cst_equal (mask,
3819 const_binop (RSHIFT_EXPR,
3820 const_binop (LSHIFT_EXPR, tmask,
3821 size_int (precision - size),
3822 0),
3823 size_int (precision - size), 0));
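Both decode_field_reference and all_ones_mask_p build their masks by shifting an all-ones value left and then right by (precision - size), leaving exactly SIZE low-order one bits. A minimal standalone sketch of the same trick in plain C (not GCC internals; low_order_mask is a hypothetical helper):

#include <assert.h>
#include <limits.h>

/* Return a mask of BITSIZE low-order one bits.
   Requires 0 < BITSIZE <= the precision of unsigned int.  */
static unsigned int
low_order_mask (int bitsize)
{
  int precision = sizeof (unsigned int) * CHAR_BIT;
  unsigned int mask = ~0u;           /* all ones */
  mask <<= precision - bitsize;      /* discard the high bits */
  mask >>= precision - bitsize;      /* logical shift refills with zeros */
  return mask;
}

int
main (void)
{
  assert (low_order_mask (1) == 0x1u);
  assert (low_order_mask (8) == 0xffu);
  assert (low_order_mask (sizeof (unsigned int) * CHAR_BIT) == ~0u);
  return 0;
}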
3826 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3827 represents the sign bit of EXP's type. If EXP represents a sign
3828 or zero extension, also test VAL against the unextended type.
3829 The return value is the (sub)expression whose sign bit is VAL,
3830 or NULL_TREE otherwise. */
3832 static tree
3833 sign_bit_p (tree exp, tree val)
3835 unsigned HOST_WIDE_INT mask_lo, lo;
3836 HOST_WIDE_INT mask_hi, hi;
3837 int width;
3838 tree t;
3840 /* Tree EXP must have an integral type. */
3841 t = TREE_TYPE (exp);
3842 if (! INTEGRAL_TYPE_P (t))
3843 return NULL_TREE;
3845 /* Tree VAL must be an integer constant. */
3846 if (TREE_CODE (val) != INTEGER_CST
3847 || TREE_OVERFLOW (val))
3848 return NULL_TREE;
3850 width = TYPE_PRECISION (t);
3851 if (width > HOST_BITS_PER_WIDE_INT)
3853 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3854 lo = 0;
3856 mask_hi = ((unsigned HOST_WIDE_INT) -1
3857 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3858 mask_lo = -1;
3860 else
3862 hi = 0;
3863 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3865 mask_hi = 0;
3866 mask_lo = ((unsigned HOST_WIDE_INT) -1
3867 >> (HOST_BITS_PER_WIDE_INT - width));
3870 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3871 treat VAL as if it were unsigned. */
3872 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3873 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3874 return exp;
3876 /* Handle extension from a narrower type. */
3877 if (TREE_CODE (exp) == NOP_EXPR
3878 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3879 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3881 return NULL_TREE;
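For word-sized types the test above reduces to comparing VAL, masked to the type's precision, against 1 << (width - 1). A standalone sketch (plain C, not GCC internals; assumes the width fits in unsigned int, and is_sign_bit is a hypothetical helper):

#include <assert.h>
#include <limits.h>

/* Does VAL, taken as a WIDTH-bit value, equal that width's sign bit?
   Requires 0 < WIDTH <= the precision of unsigned int.  */
static int
is_sign_bit (unsigned int val, int width)
{
  int precision = sizeof (unsigned int) * CHAR_BIT;
  unsigned int mask = width == precision ? ~0u : (1u << width) - 1;
  return (val & mask) == 1u << (width - 1);
}

int
main (void)
{
  assert (is_sign_bit (0x80u, 8));        /* sign bit of an 8-bit type */
  assert (! is_sign_bit (0x40u, 8));
  assert (is_sign_bit (1u << (sizeof (unsigned int) * CHAR_BIT - 1),
                       sizeof (unsigned int) * CHAR_BIT));
  return 0;
}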
3884 /* Subroutine for fold_truthop: determine if an operand is simple enough
3885 to be evaluated unconditionally. */
3887 static int
3888 simple_operand_p (tree exp)
3890 /* Strip any conversions that don't change the machine mode. */
3891 STRIP_NOPS (exp);
3893 return (CONSTANT_CLASS_P (exp)
3894 || TREE_CODE (exp) == SSA_NAME
3895 || (DECL_P (exp)
3896 && ! TREE_ADDRESSABLE (exp)
3897 && ! TREE_THIS_VOLATILE (exp)
3898 && ! DECL_NONLOCAL (exp)
3899 /* Don't regard global variables as simple. They may be
3900 allocated in ways unknown to the compiler (shared memory,
3901 #pragma weak, etc). */
3902 && ! TREE_PUBLIC (exp)
3903 && ! DECL_EXTERNAL (exp)
3904 /* Loading a static variable is unduly expensive, but global
3905 registers aren't expensive. */
3906 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3909 /* The following functions are subroutines to fold_range_test and allow it to
3910 try to change a logical combination of comparisons into a range test.
3912 For example, both
3913 X == 2 || X == 3 || X == 4 || X == 5
3914 and
3915 X >= 2 && X <= 5
3916 are converted to
3917 (unsigned) (X - 2) <= 3
3919 We describe each set of comparisons as being either inside or outside
3920 a range, using a variable named like IN_P, and then describe the
3921 range with a lower and upper bound. If one of the bounds is omitted,
3922 it represents either the highest or lowest value of the type.
3924 In the comments below, we represent a range by two numbers in brackets
3925 preceded by a "+" to designate being inside that range, or a "-" to
3926 designate being outside that range, so the condition can be inverted by
3927 flipping the prefix. An omitted bound is represented by a "-". For
3928 example, "- [-, 10]" means being outside the range starting at the lowest
3929 possible value and ending at 10, in other words, being greater than 10.
3930 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3931 always false.
3933 We set up things so that the missing bounds are handled in a consistent
3934 manner so neither a missing bound nor "true" and "false" need to be
3935 handled using a special case. */
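The conversion quoted above is easy to spot-check outside the compiler; it works because the unsigned subtraction wraps values below 2 around to very large numbers, which then fail the <= test. A standalone check (plain C, not GCC code):

#include <assert.h>

int
main (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)
    {
      int listed = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = ((unsigned) (x - 2) <= 3u);
      assert (listed == range);
    }
  return 0;
}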
3937 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3938 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3939 and UPPER1_P are nonzero if the respective argument is an upper bound
3940 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3941 must be specified for a comparison. ARG1 will be converted to ARG0's
3942 type if both are specified. */
3944 static tree
3945 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3946 tree arg1, int upper1_p)
3948 tree tem;
3949 int result;
3950 int sgn0, sgn1;
3952 /* If neither arg represents infinity, do the normal operation.
3953 Else, if not a comparison, return infinity. Else handle the special
3954 comparison rules. Note that most of the cases below won't occur, but
3955 are handled for consistency. */
3957 if (arg0 != 0 && arg1 != 0)
3959 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3960 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3961 STRIP_NOPS (tem);
3962 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3965 if (TREE_CODE_CLASS (code) != tcc_comparison)
3966 return 0;
3968 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3969 for neither. In real mathematics we cannot assume two open-ended
3970 ranges are the same, but this is computer arithmetic, where numbers
3971 are finite. We can therefore stand in for any unbounded end with a
3972 value Z greater than any representable number. This permits us to
3973 treat unbounded ranges as equal. */
3974 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3975 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3976 switch (code)
3978 case EQ_EXPR:
3979 result = sgn0 == sgn1;
3980 break;
3981 case NE_EXPR:
3982 result = sgn0 != sgn1;
3983 break;
3984 case LT_EXPR:
3985 result = sgn0 < sgn1;
3986 break;
3987 case LE_EXPR:
3988 result = sgn0 <= sgn1;
3989 break;
3990 case GT_EXPR:
3991 result = sgn0 > sgn1;
3992 break;
3993 case GE_EXPR:
3994 result = sgn0 >= sgn1;
3995 break;
3996 default:
3997 gcc_unreachable ();
4000 return constant_boolean_node (result, type);
4003 /* Given EXP, a logical expression, set the range it is testing into
4004 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4005 actually being tested. *PLOW and *PHIGH will be made of the same
4006 type as the returned expression. If EXP is not a comparison, we
4007 will most likely not be returning a useful value and range. Set
4008 *STRICT_OVERFLOW_P to true if the return value is only valid
4009 because signed overflow is undefined; otherwise, do not change
4010 *STRICT_OVERFLOW_P. */
4012 static tree
4013 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4014 bool *strict_overflow_p)
4016 enum tree_code code;
4017 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4018 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4019 int in_p, n_in_p;
4020 tree low, high, n_low, n_high;
4022 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4023 and see if we can refine the range. Some of the cases below may not
4024 happen, but it doesn't seem worth worrying about this. We "continue"
4025 the outer loop when we've changed something; otherwise we "break"
4026 the switch, which will "break" the while. */
4028 in_p = 0;
4029 low = high = build_int_cst (TREE_TYPE (exp), 0);
4031 while (1)
4033 code = TREE_CODE (exp);
4034 exp_type = TREE_TYPE (exp);
4036 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4038 if (TREE_CODE_LENGTH (code) > 0)
4039 arg0 = TREE_OPERAND (exp, 0);
4040 if (TREE_CODE_CLASS (code) == tcc_comparison
4041 || TREE_CODE_CLASS (code) == tcc_unary
4042 || TREE_CODE_CLASS (code) == tcc_binary)
4043 arg0_type = TREE_TYPE (arg0);
4044 if (TREE_CODE_CLASS (code) == tcc_binary
4045 || TREE_CODE_CLASS (code) == tcc_comparison
4046 || (TREE_CODE_CLASS (code) == tcc_expression
4047 && TREE_CODE_LENGTH (code) > 1))
4048 arg1 = TREE_OPERAND (exp, 1);
4051 switch (code)
4053 case TRUTH_NOT_EXPR:
4054 in_p = ! in_p, exp = arg0;
4055 continue;
4057 case EQ_EXPR: case NE_EXPR:
4058 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4059 /* We can only do something if the range is testing for zero
4060 and if the second operand is an integer constant. Note that
4061 saying something is "in" the range we make is done by
4062 complementing IN_P, since it will be set in the initial case of
4063 being not equal to zero; "out" is leaving it alone. */
4064 if (low == 0 || high == 0
4065 || ! integer_zerop (low) || ! integer_zerop (high)
4066 || TREE_CODE (arg1) != INTEGER_CST)
4067 break;
4069 switch (code)
4071 case NE_EXPR: /* - [c, c] */
4072 low = high = arg1;
4073 break;
4074 case EQ_EXPR: /* + [c, c] */
4075 in_p = ! in_p, low = high = arg1;
4076 break;
4077 case GT_EXPR: /* - [-, c] */
4078 low = 0, high = arg1;
4079 break;
4080 case GE_EXPR: /* + [c, -] */
4081 in_p = ! in_p, low = arg1, high = 0;
4082 break;
4083 case LT_EXPR: /* - [c, -] */
4084 low = arg1, high = 0;
4085 break;
4086 case LE_EXPR: /* + [-, c] */
4087 in_p = ! in_p, low = 0, high = arg1;
4088 break;
4089 default:
4090 gcc_unreachable ();
4093 /* If this is an unsigned comparison, we also know that EXP is
4094 greater than or equal to zero. We base the range tests we make
4095 on that fact, so we record it here so we can parse existing
4096 range tests. We test arg0_type since often the return type
4097 of, e.g. EQ_EXPR, is boolean. */
4098 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4100 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4101 in_p, low, high, 1,
4102 build_int_cst (arg0_type, 0),
4103 NULL_TREE))
4104 break;
4106 in_p = n_in_p, low = n_low, high = n_high;
4108 /* If the high bound is missing, but we have a nonzero low
4109 bound, reverse the range so it goes from zero to the low bound
4110 minus 1. */
4111 if (high == 0 && low && ! integer_zerop (low))
4113 in_p = ! in_p;
4114 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4115 integer_one_node, 0);
4116 low = build_int_cst (arg0_type, 0);
4120 exp = arg0;
4121 continue;
4123 case NEGATE_EXPR:
4124 /* (-x) IN [a,b] -> x in [-b, -a] */
4125 n_low = range_binop (MINUS_EXPR, exp_type,
4126 build_int_cst (exp_type, 0),
4127 0, high, 1);
4128 n_high = range_binop (MINUS_EXPR, exp_type,
4129 build_int_cst (exp_type, 0),
4130 0, low, 0);
4131 low = n_low, high = n_high;
4132 exp = arg0;
4133 continue;
4135 case BIT_NOT_EXPR:
4136 /* ~ X -> -X - 1 */
4137 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4138 build_int_cst (exp_type, 1));
4139 continue;
4141 case PLUS_EXPR: case MINUS_EXPR:
4142 if (TREE_CODE (arg1) != INTEGER_CST)
4143 break;
4145 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4146 move a constant to the other side. */
4147 if (!TYPE_UNSIGNED (arg0_type)
4148 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4149 break;
4151 /* If EXP is signed, any overflow in the computation is undefined,
4152 so we don't worry about it so long as our computations on
4153 the bounds don't overflow. For unsigned, overflow is defined
4154 and this is exactly the right thing. */
4155 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4156 arg0_type, low, 0, arg1, 0);
4157 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4158 arg0_type, high, 1, arg1, 0);
4159 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4160 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4161 break;
4163 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4164 *strict_overflow_p = true;
4166 /* Check for an unsigned range which has wrapped around the maximum
4167 value thus making n_high < n_low, and normalize it. */
4168 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4170 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4171 integer_one_node, 0);
4172 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4173 integer_one_node, 0);
4175 /* If the range is of the form +/- [ x+1, x ], we won't
4176 be able to normalize it. But then, it represents the
4177 whole range or the empty set, so make it
4178 +/- [ -, - ]. */
4179 if (tree_int_cst_equal (n_low, low)
4180 && tree_int_cst_equal (n_high, high))
4181 low = high = 0;
4182 else
4183 in_p = ! in_p;
4185 else
4186 low = n_low, high = n_high;
4188 exp = arg0;
4189 continue;
4191 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4192 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4193 break;
4195 if (! INTEGRAL_TYPE_P (arg0_type)
4196 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4197 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4198 break;
4200 n_low = low, n_high = high;
4202 if (n_low != 0)
4203 n_low = fold_convert (arg0_type, n_low);
4205 if (n_high != 0)
4206 n_high = fold_convert (arg0_type, n_high);
4209 /* If we're converting arg0 from an unsigned type to exp's
4210 signed type, we will be doing the comparison as unsigned.
4211 The tests above have already verified that LOW and HIGH
4212 are both positive.
4214 So we have to ensure that we will handle large unsigned
4215 values the same way that the current signed bounds treat
4216 negative values. */
4218 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4220 tree high_positive;
4221 tree equiv_type = lang_hooks.types.type_for_mode
4222 (TYPE_MODE (arg0_type), 1);
4224 /* A range without an upper bound is, naturally, unbounded.
4225 Since convert would have cropped a very large value, use
4226 the max value for the destination type. */
4227 high_positive
4228 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4229 : TYPE_MAX_VALUE (arg0_type);
4231 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4232 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4233 fold_convert (arg0_type,
4234 high_positive),
4235 build_int_cst (arg0_type, 1));
4237 /* If the low bound is specified, "and" the range with the
4238 range for which the original unsigned value will be
4239 positive. */
4240 if (low != 0)
4242 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4243 1, n_low, n_high, 1,
4244 fold_convert (arg0_type,
4245 integer_zero_node),
4246 high_positive))
4247 break;
4249 in_p = (n_in_p == in_p);
4251 else
4253 /* Otherwise, "or" the range with the range of the input
4254 that will be interpreted as negative. */
4255 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4256 0, n_low, n_high, 1,
4257 fold_convert (arg0_type,
4258 integer_zero_node),
4259 high_positive))
4260 break;
4262 in_p = (in_p != n_in_p);
4266 exp = arg0;
4267 low = n_low, high = n_high;
4268 continue;
4270 default:
4271 break;
4274 break;
4277 /* If EXP is a constant, we can evaluate whether this is true or false. */
4278 if (TREE_CODE (exp) == INTEGER_CST)
4280 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4281 exp, 0, low, 0))
4282 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4283 exp, 1, high, 1)));
4284 low = high = 0;
4285 exp = 0;
4288 *pin_p = in_p, *plow = low, *phigh = high;
4289 return exp;
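Two of the rewrites above lend themselves to a quick standalone check (plain C, not GCC code): the NEGATE_EXPR case, (-X) in [a,b] becoming X in [-b,-a], and the BIT_NOT_EXPR case, ~X becoming -X - 1 (verified here in unsigned arithmetic so every step is well defined):

#include <assert.h>

int
main (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    {
      /* (-x) in [-5, 7]  <=>  x in [-7, 5].  */
      assert (((-x >= -5) && (-x <= 7)) == ((x >= -7) && (x <= 5)));

      /* ~X -> -X - 1.  */
      assert (~(unsigned) x == -(unsigned) x - 1u);
    }
  return 0;
}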
4292 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4293 type, TYPE, return an expression to test if EXP is in (or out of, depending
4294 on IN_P) the range. Return 0 if the test couldn't be created. */
4296 static tree
4297 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4299 tree etype = TREE_TYPE (exp);
4300 tree value;
4302 #ifdef HAVE_canonicalize_funcptr_for_compare
4303 /* Disable this optimization for function pointer expressions
4304 on targets that require function pointer canonicalization. */
4305 if (HAVE_canonicalize_funcptr_for_compare
4306 && TREE_CODE (etype) == POINTER_TYPE
4307 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4308 return NULL_TREE;
4309 #endif
4311 if (! in_p)
4313 value = build_range_check (type, exp, 1, low, high);
4314 if (value != 0)
4315 return invert_truthvalue (value);
4317 return 0;
4320 if (low == 0 && high == 0)
4321 return build_int_cst (type, 1);
4323 if (low == 0)
4324 return fold_build2 (LE_EXPR, type, exp,
4325 fold_convert (etype, high));
4327 if (high == 0)
4328 return fold_build2 (GE_EXPR, type, exp,
4329 fold_convert (etype, low));
4331 if (operand_equal_p (low, high, 0))
4332 return fold_build2 (EQ_EXPR, type, exp,
4333 fold_convert (etype, low));
4335 if (integer_zerop (low))
4337 if (! TYPE_UNSIGNED (etype))
4339 etype = lang_hooks.types.unsigned_type (etype);
4340 high = fold_convert (etype, high);
4341 exp = fold_convert (etype, exp);
4343 return build_range_check (type, exp, 1, 0, high);
4346 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4347 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4349 unsigned HOST_WIDE_INT lo;
4350 HOST_WIDE_INT hi;
4351 int prec;
4353 prec = TYPE_PRECISION (etype);
4354 if (prec <= HOST_BITS_PER_WIDE_INT)
4356 hi = 0;
4357 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4359 else
4361 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4362 lo = (unsigned HOST_WIDE_INT) -1;
4365 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4367 if (TYPE_UNSIGNED (etype))
4369 etype = lang_hooks.types.signed_type (etype);
4370 exp = fold_convert (etype, exp);
4372 return fold_build2 (GT_EXPR, type, exp,
4373 build_int_cst (etype, 0));
4377 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4378 This requires wrap-around arithmetic for the type of the expression. */
4379 switch (TREE_CODE (etype))
4381 case INTEGER_TYPE:
4382 /* There is no requirement that LOW be within the range of ETYPE
4383 if the latter is a subtype. It must, however, be within the base
4384 type of ETYPE. So be sure we do the subtraction in that type. */
4385 if (TREE_TYPE (etype))
4386 etype = TREE_TYPE (etype);
4387 break;
4389 case ENUMERAL_TYPE:
4390 case BOOLEAN_TYPE:
4391 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4392 TYPE_UNSIGNED (etype));
4393 break;
4395 default:
4396 break;
4399 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4400 if (TREE_CODE (etype) == INTEGER_TYPE
4401 && !TYPE_OVERFLOW_WRAPS (etype))
4403 tree utype, minv, maxv;
4405 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4406 for the type in question, as we rely on this here. */
4407 utype = lang_hooks.types.unsigned_type (etype);
4408 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4409 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4410 integer_one_node, 1);
4411 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4413 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4414 minv, 1, maxv, 1)))
4415 etype = utype;
4416 else
4417 return 0;
4420 high = fold_convert (etype, high);
4421 low = fold_convert (etype, low);
4422 exp = fold_convert (etype, exp);
4424 value = const_binop (MINUS_EXPR, high, low, 0);
4426 if (value != 0 && !TREE_OVERFLOW (value))
4427 return build_range_check (type,
4428 fold_build2 (MINUS_EXPR, etype, exp, low),
4429 1, build_int_cst (etype, 0), value);
4431 return 0;
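The (c>=1) && (c<=127) case handled above can be checked standalone: for an 8-bit unsigned char, the range test agrees with the sign test after conversion to signed char. A sketch in plain C (not GCC code; assumes the usual two's-complement conversion behavior):

#include <assert.h>

int
main (void)
{
  int i;
  for (i = 0; i <= 255; i++)
    {
      unsigned char c = (unsigned char) i;
      int in_range = (c >= 1 && c <= 127);
      int positive = ((signed char) c > 0);
      assert (in_range == positive);
    }
  return 0;
}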
4434 /* Return the predecessor of VAL in its type, handling the infinite case. */
4436 static tree
4437 range_predecessor (tree val)
4439 tree type = TREE_TYPE (val);
4441 if (INTEGRAL_TYPE_P (type)
4442 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4443 return 0;
4444 else
4445 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4448 /* Return the successor of VAL in its type, handling the infinite case. */
4450 static tree
4451 range_successor (tree val)
4453 tree type = TREE_TYPE (val);
4455 if (INTEGRAL_TYPE_P (type)
4456 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4457 return 0;
4458 else
4459 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4462 /* Given two ranges, see if we can merge them into one. Return 1 if we
4463 can, 0 if we can't. Set the output range into the specified parameters. */
4465 static int
4466 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4467 tree high0, int in1_p, tree low1, tree high1)
4469 int no_overlap;
4470 int subset;
4471 int temp;
4472 tree tem;
4473 int in_p;
4474 tree low, high;
4475 int lowequal = ((low0 == 0 && low1 == 0)
4476 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4477 low0, 0, low1, 0)));
4478 int highequal = ((high0 == 0 && high1 == 0)
4479 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4480 high0, 1, high1, 1)));
4482 /* Make range 0 be the range that starts first, or ends last if they
4483 start at the same value. Swap them if that isn't the case. */
4484 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4485 low0, 0, low1, 0))
4486 || (lowequal
4487 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4488 high1, 1, high0, 1))))
4490 temp = in0_p, in0_p = in1_p, in1_p = temp;
4491 tem = low0, low0 = low1, low1 = tem;
4492 tem = high0, high0 = high1, high1 = tem;
4495 /* Now flag two cases, whether the ranges are disjoint or whether the
4496 second range is totally subsumed in the first. Note that the tests
4497 below are simplified by the ones above. */
4498 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4499 high0, 1, low1, 0));
4500 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4501 high1, 1, high0, 1));
4503 /* We now have four cases, depending on whether we are including or
4504 excluding the two ranges. */
4505 if (in0_p && in1_p)
4507 /* If they don't overlap, the result is false. If the second range
4508 is a subset it is the result. Otherwise, the range is from the start
4509 of the second to the end of the first. */
4510 if (no_overlap)
4511 in_p = 0, low = high = 0;
4512 else if (subset)
4513 in_p = 1, low = low1, high = high1;
4514 else
4515 in_p = 1, low = low1, high = high0;
4518 else if (in0_p && ! in1_p)
4520 /* If they don't overlap, the result is the first range. If they are
4521 equal, the result is false. If the second range is a subset of the
4522 first, and the ranges begin at the same place, we go from just after
4523 the end of the second range to the end of the first. If the second
4524 range is not a subset of the first, or if it is a subset and both
4525 ranges end at the same place, the range starts at the start of the
4526 first range and ends just before the second range.
4527 Otherwise, we can't describe this as a single range. */
4528 if (no_overlap)
4529 in_p = 1, low = low0, high = high0;
4530 else if (lowequal && highequal)
4531 in_p = 0, low = high = 0;
4532 else if (subset && lowequal)
4534 low = range_successor (high1);
4535 high = high0;
4536 in_p = (low != 0);
4538 else if (! subset || highequal)
4540 low = low0;
4541 high = range_predecessor (low1);
4542 in_p = (high != 0);
4544 else
4545 return 0;
4548 else if (! in0_p && in1_p)
4550 /* If they don't overlap, the result is the second range. If the second
4551 is a subset of the first, the result is false. Otherwise,
4552 the range starts just after the first range and ends at the
4553 end of the second. */
4554 if (no_overlap)
4555 in_p = 1, low = low1, high = high1;
4556 else if (subset || highequal)
4557 in_p = 0, low = high = 0;
4558 else
4560 low = range_successor (high0);
4561 high = high1;
4562 in_p = (low != 0);
4566 else
4568 /* The case where we are excluding both ranges. Here the complex case
4569 is if they don't overlap. In that case, the only time we have a
4570 range is if they are adjacent. If the second is a subset of the
4571 first, the result is the first. Otherwise, the range to exclude
4572 starts at the beginning of the first range and ends at the end of the
4573 second. */
4574 if (no_overlap)
4576 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4577 range_successor (high0),
4578 1, low1, 0)))
4579 in_p = 0, low = low0, high = high1;
4580 else
4582 /* Canonicalize - [min, x] into - [-, x]. */
4583 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4584 switch (TREE_CODE (TREE_TYPE (low0)))
4586 case ENUMERAL_TYPE:
4587 if (TYPE_PRECISION (TREE_TYPE (low0))
4588 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4589 break;
4590 /* FALLTHROUGH */
4591 case INTEGER_TYPE:
4592 if (tree_int_cst_equal (low0,
4593 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4594 low0 = 0;
4595 break;
4596 case POINTER_TYPE:
4597 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4598 && integer_zerop (low0))
4599 low0 = 0;
4600 break;
4601 default:
4602 break;
4605 /* Canonicalize - [x, max] into - [x, -]. */
4606 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4607 switch (TREE_CODE (TREE_TYPE (high1)))
4609 case ENUMERAL_TYPE:
4610 if (TYPE_PRECISION (TREE_TYPE (high1))
4611 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4612 break;
4613 /* FALLTHROUGH */
4614 case INTEGER_TYPE:
4615 if (tree_int_cst_equal (high1,
4616 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4617 high1 = 0;
4618 break;
4619 case POINTER_TYPE:
4620 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4621 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4622 high1, 1,
4623 integer_one_node, 1)))
4624 high1 = 0;
4625 break;
4626 default:
4627 break;
4630 /* The ranges might also be adjacent between the maximum and
4631 minimum values of the given type. For
4632 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4633 return + [x + 1, y - 1]. */
4634 if (low0 == 0 && high1 == 0)
4636 low = range_successor (high0);
4637 high = range_predecessor (low1);
4638 if (low == 0 || high == 0)
4639 return 0;
4641 in_p = 1;
4643 else
4644 return 0;
4647 else if (subset)
4648 in_p = 0, low = low0, high = high0;
4649 else
4650 in_p = 0, low = low0, high = high1;
4653 *pin_p = in_p, *plow = low, *phigh = high;
4654 return 1;
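For the first of the four cases above (both ranges included and overlapping), the merged range runs from the later start to the earlier end. A standalone spot-check of that case in plain C (not GCC code):

#include <assert.h>

int
main (void)
{
  int x;
  for (x = -50; x <= 50; x++)
    {
      int both = (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
      int merged = (x >= 5 && x <= 10);
      assert (both == merged);
    }
  return 0;
}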
4658 /* Subroutine of fold, looking inside expressions of the form
4659 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4660 of the COND_EXPR. This function is being used also to optimize
4661 A op B ? C : A, by reversing the comparison first.
4663 Return a folded expression whose code is not a COND_EXPR
4664 anymore, or NULL_TREE if no folding opportunity is found. */
4666 static tree
4667 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4669 enum tree_code comp_code = TREE_CODE (arg0);
4670 tree arg00 = TREE_OPERAND (arg0, 0);
4671 tree arg01 = TREE_OPERAND (arg0, 1);
4672 tree arg1_type = TREE_TYPE (arg1);
4673 tree tem;
4675 STRIP_NOPS (arg1);
4676 STRIP_NOPS (arg2);
4678 /* If we have A op 0 ? A : -A, consider applying the following
4679 transformations:
4681 A == 0? A : -A same as -A
4682 A != 0? A : -A same as A
4683 A >= 0? A : -A same as abs (A)
4684 A > 0? A : -A same as abs (A)
4685 A <= 0? A : -A same as -abs (A)
4686 A < 0? A : -A same as -abs (A)
4688 None of these transformations work for modes with signed
4689 zeros. If A is +/-0, the first two transformations will
4690 change the sign of the result (from +0 to -0, or vice
4691 versa). The last four will fix the sign of the result,
4692 even though the original expressions could be positive or
4693 negative, depending on the sign of A.
4695 Note that all these transformations are correct if A is
4696 NaN, since the two alternatives (A and -A) are also NaNs. */
4697 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4698 ? real_zerop (arg01)
4699 : integer_zerop (arg01))
4700 && ((TREE_CODE (arg2) == NEGATE_EXPR
4701 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4702 /* In the case that A is of the form X-Y, '-A' (arg2) may
4703 have already been folded to Y-X, check for that. */
4704 || (TREE_CODE (arg1) == MINUS_EXPR
4705 && TREE_CODE (arg2) == MINUS_EXPR
4706 && operand_equal_p (TREE_OPERAND (arg1, 0),
4707 TREE_OPERAND (arg2, 1), 0)
4708 && operand_equal_p (TREE_OPERAND (arg1, 1),
4709 TREE_OPERAND (arg2, 0), 0))))
4710 switch (comp_code)
4712 case EQ_EXPR:
4713 case UNEQ_EXPR:
4714 tem = fold_convert (arg1_type, arg1);
4715 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4716 case NE_EXPR:
4717 case LTGT_EXPR:
4718 return pedantic_non_lvalue (fold_convert (type, arg1));
4719 case UNGE_EXPR:
4720 case UNGT_EXPR:
4721 if (flag_trapping_math)
4722 break;
4723 /* Fall through. */
4724 case GE_EXPR:
4725 case GT_EXPR:
4726 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4727 arg1 = fold_convert (lang_hooks.types.signed_type
4728 (TREE_TYPE (arg1)), arg1);
4729 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4730 return pedantic_non_lvalue (fold_convert (type, tem));
4731 case UNLE_EXPR:
4732 case UNLT_EXPR:
4733 if (flag_trapping_math)
4734 break;
4735 case LE_EXPR:
4736 case LT_EXPR:
4737 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4738 arg1 = fold_convert (lang_hooks.types.signed_type
4739 (TREE_TYPE (arg1)), arg1);
4740 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4741 return negate_expr (fold_convert (type, tem));
4742 default:
4743 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4744 break;
4747 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4748 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4749 both transformations are correct when A is NaN: A != 0
4750 is then true, and A == 0 is false. */
4752 if (integer_zerop (arg01) && integer_zerop (arg2))
4754 if (comp_code == NE_EXPR)
4755 return pedantic_non_lvalue (fold_convert (type, arg1));
4756 else if (comp_code == EQ_EXPR)
4757 return build_int_cst (type, 0);
4760 /* Try some transformations of A op B ? A : B.
4762 A == B? A : B same as B
4763 A != B? A : B same as A
4764 A >= B? A : B same as max (A, B)
4765 A > B? A : B same as max (B, A)
4766 A <= B? A : B same as min (A, B)
4767 A < B? A : B same as min (B, A)
4769 As above, these transformations don't work in the presence
4770 of signed zeros. For example, if A and B are zeros of
4771 opposite sign, the first two transformations will change
4772 the sign of the result. In the last four, the original
4773 expressions give different results for (A=+0, B=-0) and
4774 (A=-0, B=+0), but the transformed expressions do not.
4776 The first two transformations are correct if either A or B
4777 is a NaN. In the first transformation, the condition will
4778 be false, and B will indeed be chosen. In the case of the
4779 second transformation, the condition A != B will be true,
4780 and A will be chosen.
4782 The conversions to max() and min() are not correct if B is
4783 a number and A is not. The conditions in the original
4784 expressions will be false, so all four give B. The min()
4785 and max() versions would give a NaN instead. */
4786 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4787 /* Avoid these transformations if the COND_EXPR may be used
4788 as an lvalue in the C++ front-end. PR c++/19199. */
4789 && (in_gimple_form
4790 || (strcmp (lang_hooks.name, "GNU C++") != 0
4791 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4792 || ! maybe_lvalue_p (arg1)
4793 || ! maybe_lvalue_p (arg2)))
4795 tree comp_op0 = arg00;
4796 tree comp_op1 = arg01;
4797 tree comp_type = TREE_TYPE (comp_op0);
4799 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4800 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4802 comp_type = type;
4803 comp_op0 = arg1;
4804 comp_op1 = arg2;
4807 switch (comp_code)
4809 case EQ_EXPR:
4810 return pedantic_non_lvalue (fold_convert (type, arg2));
4811 case NE_EXPR:
4812 return pedantic_non_lvalue (fold_convert (type, arg1));
4813 case LE_EXPR:
4814 case LT_EXPR:
4815 case UNLE_EXPR:
4816 case UNLT_EXPR:
4817 /* In C++ a ?: expression can be an lvalue, so put the
4818 operand which will be used if they are equal first
4819 so that we can convert this back to the
4820 corresponding COND_EXPR. */
4821 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4823 comp_op0 = fold_convert (comp_type, comp_op0);
4824 comp_op1 = fold_convert (comp_type, comp_op1);
4825 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4826 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4827 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4828 return pedantic_non_lvalue (fold_convert (type, tem));
4830 break;
4831 case GE_EXPR:
4832 case GT_EXPR:
4833 case UNGE_EXPR:
4834 case UNGT_EXPR:
4835 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4837 comp_op0 = fold_convert (comp_type, comp_op0);
4838 comp_op1 = fold_convert (comp_type, comp_op1);
4839 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4840 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4841 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4842 return pedantic_non_lvalue (fold_convert (type, tem));
4844 break;
4845 case UNEQ_EXPR:
4846 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4847 return pedantic_non_lvalue (fold_convert (type, arg2));
4848 break;
4849 case LTGT_EXPR:
4850 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4851 return pedantic_non_lvalue (fold_convert (type, arg1));
4852 break;
4853 default:
4854 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4855 break;
4859 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4860 we might still be able to simplify this. For example,
4861 if C1 is one less or one more than C2, this might have started
4862 out as a MIN or MAX and been transformed by this function.
4863 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4865 if (INTEGRAL_TYPE_P (type)
4866 && TREE_CODE (arg01) == INTEGER_CST
4867 && TREE_CODE (arg2) == INTEGER_CST)
4868 switch (comp_code)
4870 case EQ_EXPR:
4871 /* We can replace A with C1 in this case. */
4872 arg1 = fold_convert (type, arg01);
4873 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4875 case LT_EXPR:
4876 /* If C1 is C2 + 1, this is min(A, C2). */
4877 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4878 OEP_ONLY_CONST)
4879 && operand_equal_p (arg01,
4880 const_binop (PLUS_EXPR, arg2,
4881 build_int_cst (type, 1), 0),
4882 OEP_ONLY_CONST))
4883 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4884 type, arg1, arg2));
4885 break;
4887 case LE_EXPR:
4888 /* If C1 is C2 - 1, this is min(A, C2). */
4889 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4890 OEP_ONLY_CONST)
4891 && operand_equal_p (arg01,
4892 const_binop (MINUS_EXPR, arg2,
4893 build_int_cst (type, 1), 0),
4894 OEP_ONLY_CONST))
4895 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4896 type, arg1, arg2));
4897 break;
4899 case GT_EXPR:
4900 /* If C1 is C2 - 1, this is max(A, C2). */
4901 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4902 OEP_ONLY_CONST)
4903 && operand_equal_p (arg01,
4904 const_binop (MINUS_EXPR, arg2,
4905 build_int_cst (type, 1), 0),
4906 OEP_ONLY_CONST))
4907 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4908 type, arg1, arg2));
4909 break;
4911 case GE_EXPR:
4912 /* If C1 is C2 + 1, this is max(A, C2). */
4913 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4914 OEP_ONLY_CONST)
4915 && operand_equal_p (arg01,
4916 const_binop (PLUS_EXPR, arg2,
4917 build_int_cst (type, 1), 0),
4918 OEP_ONLY_CONST))
4919 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4920 type, arg1, arg2));
4921 break;
4922 case NE_EXPR:
4923 break;
4924 default:
4925 gcc_unreachable ();
4928 return NULL_TREE;
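A standalone spot-check (plain C, not GCC code) of two rows of the tables above, restricted to integers so that signed zeros and NaNs cannot interfere: A > 0 ? A : -A behaves as abs (A), and A < B ? A : B behaves as min (B, A):

#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  int a, b;
  for (a = -20; a <= 20; a++)
    {
      assert ((a > 0 ? a : -a) == abs (a));
      for (b = -20; b <= 20; b++)
        assert ((a < b ? a : b) == (b < a ? b : a));  /* min (B, A) */
    }
  return 0;
}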
4933 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4934 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4935 #endif
4937 /* EXP is some logical combination of boolean tests. See if we can
4938 merge it into some range test. Return the new tree if so. */
4940 static tree
4941 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4943 int or_op = (code == TRUTH_ORIF_EXPR
4944 || code == TRUTH_OR_EXPR);
4945 int in0_p, in1_p, in_p;
4946 tree low0, low1, low, high0, high1, high;
4947 bool strict_overflow_p = false;
4948 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4949 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4950 tree tem;
4951 const char * const warnmsg = G_("assuming signed overflow does not occur "
4952 "when simplifying range test");
4954 /* If this is an OR operation, invert both sides; we will invert
4955 again at the end. */
4956 if (or_op)
4957 in0_p = ! in0_p, in1_p = ! in1_p;
4959 /* If both expressions are the same, if we can merge the ranges, and we
4960 can build the range test, return it or it inverted. If one of the
4961 ranges is always true or always false, consider it to be the same
4962 expression as the other. */
4963 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4964 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4965 in1_p, low1, high1)
4966 && 0 != (tem = (build_range_check (type,
4967 lhs != 0 ? lhs
4968 : rhs != 0 ? rhs : integer_zero_node,
4969 in_p, low, high))))
4971 if (strict_overflow_p)
4972 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4973 return or_op ? invert_truthvalue (tem) : tem;
4976 /* On machines where the branch cost is expensive, if this is a
4977 short-circuited branch and the underlying object on both sides
4978 is the same, make a non-short-circuit operation. */
4979 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4980 && lhs != 0 && rhs != 0
4981 && (code == TRUTH_ANDIF_EXPR
4982 || code == TRUTH_ORIF_EXPR)
4983 && operand_equal_p (lhs, rhs, 0))
4985 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4986 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4987 which cases we can't do this. */
4988 if (simple_operand_p (lhs))
4989 return build2 (code == TRUTH_ANDIF_EXPR
4990 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4991 type, op0, op1);
4993 else if (lang_hooks.decls.global_bindings_p () == 0
4994 && ! CONTAINS_PLACEHOLDER_P (lhs))
4996 tree common = save_expr (lhs);
4998 if (0 != (lhs = build_range_check (type, common,
4999 or_op ? ! in0_p : in0_p,
5000 low0, high0))
5001 && (0 != (rhs = build_range_check (type, common,
5002 or_op ? ! in1_p : in1_p,
5003 low1, high1))))
5005 if (strict_overflow_p)
5006 fold_overflow_warning (warnmsg,
5007 WARN_STRICT_OVERFLOW_COMPARISON);
5008 return build2 (code == TRUTH_ANDIF_EXPR
5009 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5010 type, lhs, rhs);
5015 return 0;
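The double inversion above (for an OR, invert both ranges, merge as an AND, invert the result) is De Morgan's law applied to range tests. A standalone check of the underlying identity (plain C, not GCC code):

#include <assert.h>

int
main (void)
{
  int x;
  for (x = -50; x <= 50; x++)
    {
      int or_form = (x < 2) || (x > 10);       /* outside [2, 10] */
      int inverted = ! ((x >= 2) && (x <= 10));
      assert (or_form == inverted);
    }
  return 0;
}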
5018 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5019 bit value. Arrange things so the extra bits will be set to zero if and
5020 only if C is sign-extended to its full width. If MASK is nonzero,
5021 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5023 static tree
5024 unextend (tree c, int p, int unsignedp, tree mask)
5026 tree type = TREE_TYPE (c);
5027 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5028 tree temp;
5030 if (p == modesize || unsignedp)
5031 return c;
5033 /* We work by getting just the sign bit into the low-order bit, then
5034 into the high-order bit, then sign-extending. We then XOR that value
5035 with C. */
5036 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5037 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5039 /* We must use a signed type in order to get an arithmetic right shift.
5040 However, we must also avoid introducing accidental overflows, so that
5041 a subsequent call to integer_zerop will work. Hence we must
5042 do the type conversion here. At this point, the constant is either
5043 zero or one, and the conversion to a signed type can never overflow.
5044 We could get an overflow if this conversion is done anywhere else. */
5045 if (TYPE_UNSIGNED (type))
5046 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
5048 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5049 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5050 if (mask != 0)
5051 temp = const_binop (BIT_AND_EXPR, temp,
5052 fold_convert (TREE_TYPE (c), mask), 0);
5053 /* If necessary, convert the type back to match the type of C. */
5054 if (TYPE_UNSIGNED (type))
5055 temp = fold_convert (type, temp);
5057 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5060 /* Find ways of folding logical expressions of LHS and RHS:
5061 Try to merge two comparisons to the same innermost item.
5062 Look for range tests like "ch >= '0' && ch <= '9'".
5063 Look for combinations of simple terms on machines with expensive branches
5064 and evaluate the RHS unconditionally.
5066 For example, if we have p->a == 2 && p->b == 4 and we can make an
5067 object large enough to span both A and B, we can do this with a comparison
5068 against the object ANDed with a mask.
5070 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5071 operations to do this with one comparison.
5073 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5074 function and the one above.
5076 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5077 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5079 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5080 two operands.
5082 We return the simplified tree or 0 if no optimization is possible. */
5084 static tree
5085 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5087 /* If this is the "or" of two comparisons, we can do something if
5088 the comparisons are NE_EXPR. If this is the "and", we can do something
5089 if the comparisons are EQ_EXPR. I.e.,
5090 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5092 WANTED_CODE is this operation code. For single bit fields, we can
5093 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5094 comparison for one-bit fields. */
5096 enum tree_code wanted_code;
5097 enum tree_code lcode, rcode;
5098 tree ll_arg, lr_arg, rl_arg, rr_arg;
5099 tree ll_inner, lr_inner, rl_inner, rr_inner;
5100 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5101 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5102 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5103 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5104 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5105 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5106 enum machine_mode lnmode, rnmode;
5107 tree ll_mask, lr_mask, rl_mask, rr_mask;
5108 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5109 tree l_const, r_const;
5110 tree lntype, rntype, result;
5111 int first_bit, end_bit;
5112 int volatilep;
5113 tree orig_lhs = lhs, orig_rhs = rhs;
5114 enum tree_code orig_code = code;
5116 /* Start by getting the comparison codes. Fail if anything is volatile.
5117 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5118 it were surrounded with a NE_EXPR. */
5120 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5121 return 0;
5123 lcode = TREE_CODE (lhs);
5124 rcode = TREE_CODE (rhs);
5126 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5128 lhs = build2 (NE_EXPR, truth_type, lhs,
5129 build_int_cst (TREE_TYPE (lhs), 0));
5130 lcode = NE_EXPR;
5133 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5135 rhs = build2 (NE_EXPR, truth_type, rhs,
5136 build_int_cst (TREE_TYPE (rhs), 0));
5137 rcode = NE_EXPR;
5140 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5141 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5142 return 0;
5144 ll_arg = TREE_OPERAND (lhs, 0);
5145 lr_arg = TREE_OPERAND (lhs, 1);
5146 rl_arg = TREE_OPERAND (rhs, 0);
5147 rr_arg = TREE_OPERAND (rhs, 1);
5149 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5150 if (simple_operand_p (ll_arg)
5151 && simple_operand_p (lr_arg))
5153 tree result;
5154 if (operand_equal_p (ll_arg, rl_arg, 0)
5155 && operand_equal_p (lr_arg, rr_arg, 0))
5157 result = combine_comparisons (code, lcode, rcode,
5158 truth_type, ll_arg, lr_arg);
5159 if (result)
5160 return result;
5162 else if (operand_equal_p (ll_arg, rr_arg, 0)
5163 && operand_equal_p (lr_arg, rl_arg, 0))
5165 result = combine_comparisons (code, lcode,
5166 swap_tree_comparison (rcode),
5167 truth_type, ll_arg, lr_arg);
5168 if (result)
5169 return result;
5173 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5174 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5176 /* If the RHS can be evaluated unconditionally and its operands are
5177 simple, evaluating it unconditionally wins on machines
5178 with expensive branches. In this case, this isn't a comparison
5179 that can be merged. Avoid doing this if the RHS is a floating-point
5180 comparison since those can trap. */
5182 if (BRANCH_COST >= 2
5183 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5184 && simple_operand_p (rl_arg)
5185 && simple_operand_p (rr_arg))
5187 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5188 if (code == TRUTH_OR_EXPR
5189 && lcode == NE_EXPR && integer_zerop (lr_arg)
5190 && rcode == NE_EXPR && integer_zerop (rr_arg)
5191 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5192 return build2 (NE_EXPR, truth_type,
5193 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5194 ll_arg, rl_arg),
5195 build_int_cst (TREE_TYPE (ll_arg), 0));
5197 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5198 if (code == TRUTH_AND_EXPR
5199 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5200 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5201 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5202 return build2 (EQ_EXPR, truth_type,
5203 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5204 ll_arg, rl_arg),
5205 build_int_cst (TREE_TYPE (ll_arg), 0));
5207 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5209 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5210 return build2 (code, truth_type, lhs, rhs);
5211 return NULL_TREE;
5215 /* See if the comparisons can be merged. Then get all the parameters for
5216 each side. */
5218 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5219 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5220 return 0;
5222 volatilep = 0;
5223 ll_inner = decode_field_reference (ll_arg,
5224 &ll_bitsize, &ll_bitpos, &ll_mode,
5225 &ll_unsignedp, &volatilep, &ll_mask,
5226 &ll_and_mask);
5227 lr_inner = decode_field_reference (lr_arg,
5228 &lr_bitsize, &lr_bitpos, &lr_mode,
5229 &lr_unsignedp, &volatilep, &lr_mask,
5230 &lr_and_mask);
5231 rl_inner = decode_field_reference (rl_arg,
5232 &rl_bitsize, &rl_bitpos, &rl_mode,
5233 &rl_unsignedp, &volatilep, &rl_mask,
5234 &rl_and_mask);
5235 rr_inner = decode_field_reference (rr_arg,
5236 &rr_bitsize, &rr_bitpos, &rr_mode,
5237 &rr_unsignedp, &volatilep, &rr_mask,
5238 &rr_and_mask);
5240 /* The inner operation on the lhs of each comparison must be the same
5241 if we are to be able to do anything.
5242 Then see if we have constants. If not, the same must be true for
5243 the rhs's. */
5244 if (volatilep || ll_inner == 0 || rl_inner == 0
5245 || ! operand_equal_p (ll_inner, rl_inner, 0))
5246 return 0;
5248 if (TREE_CODE (lr_arg) == INTEGER_CST
5249 && TREE_CODE (rr_arg) == INTEGER_CST)
5250 l_const = lr_arg, r_const = rr_arg;
5251 else if (lr_inner == 0 || rr_inner == 0
5252 || ! operand_equal_p (lr_inner, rr_inner, 0))
5253 return 0;
5254 else
5255 l_const = r_const = 0;
5257 /* If either comparison code is not correct for our logical operation,
5258 fail. However, we can convert a one-bit comparison against zero into
5259 the opposite comparison against that bit being set in the field. */
5261 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5262 if (lcode != wanted_code)
5264 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5266 /* Make the left operand unsigned, since we are only interested
5267 in the value of one bit. Otherwise we are doing the wrong
5268 thing below. */
5269 ll_unsignedp = 1;
5270 l_const = ll_mask;
5272 else
5273 return 0;
5276 /* This is analogous to the code for l_const above. */
5277 if (rcode != wanted_code)
5279 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5281 rl_unsignedp = 1;
5282 r_const = rl_mask;
5284 else
5285 return 0;
5288 /* See if we can find a mode that contains both fields being compared on
5289 the left. If we can't, fail. Otherwise, update all constants and masks
5290 to be relative to a field of that size. */
5291 first_bit = MIN (ll_bitpos, rl_bitpos);
5292 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5293 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5294 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5295 volatilep);
5296 if (lnmode == VOIDmode)
5297 return 0;
5299 lnbitsize = GET_MODE_BITSIZE (lnmode);
5300 lnbitpos = first_bit & ~ (lnbitsize - 1);
5301 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5302 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5304 if (BYTES_BIG_ENDIAN)
5306 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5307 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5310 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5311 size_int (xll_bitpos), 0);
5312 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5313 size_int (xrl_bitpos), 0);
5315 if (l_const)
5317 l_const = fold_convert (lntype, l_const);
5318 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5319 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5320 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5321 fold_build1 (BIT_NOT_EXPR,
5322 lntype, ll_mask),
5323 0)))
5325 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5327 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5330 if (r_const)
5332 r_const = fold_convert (lntype, r_const);
5333 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5334 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5335 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5336 fold_build1 (BIT_NOT_EXPR,
5337 lntype, rl_mask),
5338 0)))
5340 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5342 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5346 /* If the right sides are not constant, do the same for them. Also,
5347 disallow this optimization if a size or signedness mismatch occurs
5348 between the left and right sides. */
5349 if (l_const == 0)
5351 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5352 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5353 /* Make sure the two fields on the right
5354 correspond to the left without being swapped. */
5355 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5356 return 0;
5358 first_bit = MIN (lr_bitpos, rr_bitpos);
5359 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5360 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5361 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5362 volatilep);
5363 if (rnmode == VOIDmode)
5364 return 0;
5366 rnbitsize = GET_MODE_BITSIZE (rnmode);
5367 rnbitpos = first_bit & ~ (rnbitsize - 1);
5368 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5369 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5371 if (BYTES_BIG_ENDIAN)
5373 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5374 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5377 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5378 size_int (xlr_bitpos), 0);
5379 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5380 size_int (xrr_bitpos), 0);
5382 /* Make a mask that corresponds to both fields being compared.
5383 Do this for both items being compared. If the operands are the
5384 same size and the bits being compared are in the same position
5385 then we can do this by masking both and comparing the masked
5386 results. */
5387 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5388 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5389 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5391 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5392 ll_unsignedp || rl_unsignedp);
5393 if (! all_ones_mask_p (ll_mask, lnbitsize))
5394 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5396 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5397 lr_unsignedp || rr_unsignedp);
5398 if (! all_ones_mask_p (lr_mask, rnbitsize))
5399 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5401 return build2 (wanted_code, truth_type, lhs, rhs);
5404 /* There is still another way we can do something: If both pairs of
5405 fields being compared are adjacent, we may be able to make a wider
5406 field containing them both.
5408 Note that we still must mask the lhs/rhs expressions. Furthermore,
5409 the mask must be shifted to account for the shift done by
5410 make_bit_field_ref. */
5411 if ((ll_bitsize + ll_bitpos == rl_bitpos
5412 && lr_bitsize + lr_bitpos == rr_bitpos)
5413 || (ll_bitpos == rl_bitpos + rl_bitsize
5414 && lr_bitpos == rr_bitpos + rr_bitsize))
5416 tree type;
5418 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5419 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5420 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5421 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5423 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5424 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5425 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5426 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5428 /* Convert to the smaller type before masking out unwanted bits. */
5429 type = lntype;
5430 if (lntype != rntype)
5432 if (lnbitsize > rnbitsize)
5434 lhs = fold_convert (rntype, lhs);
5435 ll_mask = fold_convert (rntype, ll_mask);
5436 type = rntype;
5438 else if (lnbitsize < rnbitsize)
5440 rhs = fold_convert (lntype, rhs);
5441 lr_mask = fold_convert (lntype, lr_mask);
5442 type = lntype;
5446 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5447 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5449 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5450 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5452 return build2 (wanted_code, truth_type, lhs, rhs);
5455 return 0;
5458 /* Handle the case of comparisons with constants. If there is something in
5459 common between the masks, those bits of the constants must be the same.
5460 If not, the condition is always false. Test for this to avoid generating
5461 incorrect code below. */
5462 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5463 if (! integer_zerop (result)
5464 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5465 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5467 if (wanted_code == NE_EXPR)
5469 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5470 return constant_boolean_node (true, truth_type);
5472 else
5474 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5475 return constant_boolean_node (false, truth_type);
5479 /* Construct the expression we will return. First get the component
5480 reference we will make. Unless the mask is all ones the width of
5481 that field, perform the mask operation. Then compare with the
5482 merged constant. */
5483 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5484 ll_unsignedp || rl_unsignedp);
5486 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5487 if (! all_ones_mask_p (ll_mask, lnbitsize))
5488 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5490 return build2 (wanted_code, truth_type, result,
5491 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
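Two of the branch-saving conversions used above rest on the fact that a | b is zero exactly when both operands are zero. A standalone check (plain C, not GCC code):

#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = -3; a <= 3; a++)
    for (b = -3; b <= 3; b++)
      {
        assert (((a != 0) || (b != 0)) == ((a | b) != 0));
        assert (((a == 0) && (b == 0)) == ((a | b) == 0));
      }
  return 0;
}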
5494 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5495 constant. */
5497 static tree
5498 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5500 tree arg0 = op0;
5501 enum tree_code op_code;
5502 tree comp_const = op1;
5503 tree minmax_const;
5504 int consts_equal, consts_lt;
5505 tree inner;
5507 STRIP_SIGN_NOPS (arg0);
5509 op_code = TREE_CODE (arg0);
5510 minmax_const = TREE_OPERAND (arg0, 1);
5511 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5512 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5513 inner = TREE_OPERAND (arg0, 0);
5515 /* If something does not permit us to optimize, fail by returning NULL_TREE. */
5516 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5517 || TREE_CODE (comp_const) != INTEGER_CST
5518 || TREE_OVERFLOW (comp_const)
5519 || TREE_CODE (minmax_const) != INTEGER_CST
5520 || TREE_OVERFLOW (minmax_const))
5521 return NULL_TREE;
5523 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5524 and GT_EXPR, doing the rest with recursive calls using logical
5525 simplifications. */
5526 switch (code)
5528 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5530 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5531 type, op0, op1);
5532 if (tem)
5533 return invert_truthvalue (tem);
5534 return NULL_TREE;
5537 case GE_EXPR:
5538 return
5539 fold_build2 (TRUTH_ORIF_EXPR, type,
5540 optimize_minmax_comparison
5541 (EQ_EXPR, type, arg0, comp_const),
5542 optimize_minmax_comparison
5543 (GT_EXPR, type, arg0, comp_const));
5545 case EQ_EXPR:
5546 if (op_code == MAX_EXPR && consts_equal)
5547 /* MAX (X, 0) == 0 -> X <= 0 */
5548 return fold_build2 (LE_EXPR, type, inner, comp_const);
5550 else if (op_code == MAX_EXPR && consts_lt)
5551 /* MAX (X, 0) == 5 -> X == 5 */
5552 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5554 else if (op_code == MAX_EXPR)
5555 /* MAX (X, 0) == -1 -> false */
5556 return omit_one_operand (type, integer_zero_node, inner);
5558 else if (consts_equal)
5559 /* MIN (X, 0) == 0 -> X >= 0 */
5560 return fold_build2 (GE_EXPR, type, inner, comp_const);
5562 else if (consts_lt)
5563 /* MIN (X, 0) == 5 -> false */
5564 return omit_one_operand (type, integer_zero_node, inner);
5566 else
5567 /* MIN (X, 0) == -1 -> X == -1 */
5568 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5570 case GT_EXPR:
5571 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5572 /* MAX (X, 0) > 0 -> X > 0
5573 MAX (X, 0) > 5 -> X > 5 */
5574 return fold_build2 (GT_EXPR, type, inner, comp_const);
5576 else if (op_code == MAX_EXPR)
5577 /* MAX (X, 0) > -1 -> true */
5578 return omit_one_operand (type, integer_one_node, inner);
5580 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5581 /* MIN (X, 0) > 0 -> false
5582 MIN (X, 0) > 5 -> false */
5583 return omit_one_operand (type, integer_zero_node, inner);
5585 else
5586 /* MIN (X, 0) > -1 -> X > -1 */
5587 return fold_build2 (GT_EXPR, type, inner, comp_const);
5589 default:
5590 return NULL_TREE;
5594 /* T is an integer expression that is being multiplied, divided, or taken a
5595 modulus (CODE says which and what kind of divide or modulus) by a
5596 constant C. See if we can eliminate that operation by folding it with
5597 other operations already in T. WIDE_TYPE, if non-null, is a type that
5598 should be used for the computation if wider than our type.
5600 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5601 (X * 2) + (Y * 4). We must, however, be assured that either the original
5602 expression would not overflow or that overflow is undefined for the type
5603 in the language in question.
5605 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5606 the machine has a multiply-accumulate insn or that this is part of an
5607 addressing calculation.
5609 If we return a non-null expression, it is an equivalent form of the
5610 original computation, but need not be in the original type.
5612 We set *STRICT_OVERFLOW_P to true if the return value depends on
5613 signed overflow being undefined. Otherwise we do not change
5614 *STRICT_OVERFLOW_P. */
5616 static tree
5617 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5618 bool *strict_overflow_p)
5620 /* To avoid exponential search depth, refuse to allow recursion past
5621 three levels. Beyond that (1) it's highly unlikely that we'll find
5622 something interesting and (2) we've probably processed it before
5623 when we built the inner expression. */
5625 static int depth;
5626 tree ret;
5628 if (depth > 3)
5629 return NULL;
5631 depth++;
5632 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5633 depth--;
5635 return ret;
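/* A minimal usage sketch (illustrative addition, not part of the
   original sources; guarded out with #if 0).  This is roughly how a
   caller might try to eliminate a division T / C, where T and C are
   trees and TYPE is the desired result type:  */
#if 0
{
  bool overflow_matters = false;
  tree tem = extract_muldiv (t, c, TRUNC_DIV_EXPR, NULL_TREE,
			     &overflow_matters);
  /* Only use the simplified form when it does not rely on signed
     overflow being undefined, or when the type guarantees that.  */
  if (tem && (!overflow_matters || TYPE_OVERFLOW_UNDEFINED (type)))
    return fold_convert (type, tem);
}
#endif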
5638 static tree
5639 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5640 bool *strict_overflow_p)
5642 tree type = TREE_TYPE (t);
5643 enum tree_code tcode = TREE_CODE (t);
5644 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5645 > GET_MODE_SIZE (TYPE_MODE (type)))
5646 ? wide_type : type);
5647 tree t1, t2;
5648 int same_p = tcode == code;
5649 tree op0 = NULL_TREE, op1 = NULL_TREE;
5650 bool sub_strict_overflow_p;
5652 /* Don't deal with constants of zero here; they confuse the code below. */
5653 if (integer_zerop (c))
5654 return NULL_TREE;
5656 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5657 op0 = TREE_OPERAND (t, 0);
5659 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5660 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5662 /* Note that we need not handle conditional operations here since fold
5663 already handles those cases. So just do arithmetic here. */
5664 switch (tcode)
5666 case INTEGER_CST:
5667 /* For a constant, we can always simplify if we are a multiply
5668 or (for divide and modulus) if it is a multiple of our constant. */
5669 if (code == MULT_EXPR
5670 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5671 return const_binop (code, fold_convert (ctype, t),
5672 fold_convert (ctype, c), 0);
5673 break;
5675 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5676 /* If op0 is an expression ... */
5677 if ((COMPARISON_CLASS_P (op0)
5678 || UNARY_CLASS_P (op0)
5679 || BINARY_CLASS_P (op0)
5680 || EXPRESSION_CLASS_P (op0))
5681 /* ... and is unsigned, and its type is smaller than ctype,
5682 then we cannot pass through this widening. */
5683 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5684 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5685 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5686 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5687 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5688 /* ... or this is a truncation (t is narrower than op0),
5689 then we cannot pass through this narrowing. */
5690 || (GET_MODE_SIZE (TYPE_MODE (type))
5691 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5692 /* ... or signedness changes for division or modulus,
5693 then we cannot pass through this conversion. */
5694 || (code != MULT_EXPR
5695 && (TYPE_UNSIGNED (ctype)
5696 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5697 break;
5699 /* Pass the constant down and see if we can make a simplification. If
5700 we can, replace this expression with the inner simplification for
5701 possible later conversion to our or some other type. */
5702 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5703 && TREE_CODE (t2) == INTEGER_CST
5704 && !TREE_OVERFLOW (t2)
5705 && (0 != (t1 = extract_muldiv (op0, t2, code,
5706 code == MULT_EXPR
5707 ? ctype : NULL_TREE,
5708 strict_overflow_p))))
5709 return t1;
5710 break;
5712 case ABS_EXPR:
5713 /* If widening the type changes it from signed to unsigned, then we
5714 must avoid building ABS_EXPR itself as unsigned. */
5715 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5717 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5718 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5719 != 0)
5721 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5722 return fold_convert (ctype, t1);
5724 break;
5726 /* FALLTHROUGH */
5727 case NEGATE_EXPR:
5728 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5729 != 0)
5730 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5731 break;
5733 case MIN_EXPR: case MAX_EXPR:
5734 /* If widening the type changes the signedness, then we can't perform
5735 this optimization as that changes the result. */
5736 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5737 break;
5739 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5740 sub_strict_overflow_p = false;
5741 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5742 &sub_strict_overflow_p)) != 0
5743 && (t2 = extract_muldiv (op1, c, code, wide_type,
5744 &sub_strict_overflow_p)) != 0)
5746 if (tree_int_cst_sgn (c) < 0)
5747 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5748 if (sub_strict_overflow_p)
5749 *strict_overflow_p = true;
5750 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5751 fold_convert (ctype, t2));
5753 break;
5755 case LSHIFT_EXPR: case RSHIFT_EXPR:
5756 /* If the second operand is constant, this is a multiplication
5757 or floor division by a power of two, so we can treat it that
5758 way unless the multiplier or divisor overflows. Signed
5759 left-shift overflow is implementation-defined rather than
5760 undefined in C90, so do not convert signed left shift into
5761 multiplication. */
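/* For example (illustrative): with unsigned X, (X << 3) % 4 is
   rewritten here as (X * 8) % 4, which the MULT_EXPR case below
   then folds to 0.  */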
5762 if (TREE_CODE (op1) == INTEGER_CST
5763 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5764 /* const_binop may not detect overflow correctly,
5765 so check for it explicitly here. */
5766 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5767 && TREE_INT_CST_HIGH (op1) == 0
5768 && 0 != (t1 = fold_convert (ctype,
5769 const_binop (LSHIFT_EXPR,
5770 size_one_node,
5771 op1, 0)))
5772 && !TREE_OVERFLOW (t1))
5773 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5774 ? MULT_EXPR : FLOOR_DIV_EXPR,
5775 ctype, fold_convert (ctype, op0), t1),
5776 c, code, wide_type, strict_overflow_p);
5777 break;
5779 case PLUS_EXPR: case MINUS_EXPR:
5780 /* See if we can eliminate the operation on both sides. If we can, we
5781 can return a new PLUS or MINUS. If we can't, the only remaining
5782 case where we can do anything is when the second operand is a
5783 constant. */
5784 sub_strict_overflow_p = false;
5785 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5786 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5787 if (t1 != 0 && t2 != 0
5788 && (code == MULT_EXPR
5789 /* If not multiplication, we can only do this if both operands
5790 are divisible by c. */
5791 || (multiple_of_p (ctype, op0, c)
5792 && multiple_of_p (ctype, op1, c))))
5794 if (sub_strict_overflow_p)
5795 *strict_overflow_p = true;
5796 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5797 fold_convert (ctype, t2));
5800 /* If this was a subtraction, negate OP1 and set it to be an addition.
5801 This simplifies the logic below. */
5802 if (tcode == MINUS_EXPR)
5803 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5805 if (TREE_CODE (op1) != INTEGER_CST)
5806 break;
5808 /* If either OP1 or C is negative, this optimization is not safe for
5809 some of the division and remainder types while for others we need
5810 to change the code. */
5811 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5813 if (code == CEIL_DIV_EXPR)
5814 code = FLOOR_DIV_EXPR;
5815 else if (code == FLOOR_DIV_EXPR)
5816 code = CEIL_DIV_EXPR;
5817 else if (code != MULT_EXPR
5818 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5819 break;
5822 /* If this is a multiply, or a division/modulus where OP1 is a multiple
5823 of our constant, do the operation and verify it doesn't overflow. */
5824 if (code == MULT_EXPR
5825 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5827 op1 = const_binop (code, fold_convert (ctype, op1),
5828 fold_convert (ctype, c), 0);
5829 /* We allow the constant to overflow with wrapping semantics. */
5830 if (op1 == 0
5831 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5832 break;
5834 else
5835 break;
5837 /* If we have an unsigned type that is not a sizetype, we cannot widen
5838 the operation since it will change the result if the original
5839 computation overflowed. */
5840 if (TYPE_UNSIGNED (ctype)
5841 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5842 && ctype != type)
5843 break;
5845 /* If we were able to eliminate our operation from the first side,
5846 apply our operation to the second side and reform the PLUS. */
5847 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5848 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5850 /* The last case is if we are a multiply. In that case, we can
5851 apply the distributive law to commute the multiply and addition
5852 if the multiplication of the constants doesn't overflow. */
5853 if (code == MULT_EXPR)
5854 return fold_build2 (tcode, ctype,
5855 fold_build2 (code, ctype,
5856 fold_convert (ctype, op0),
5857 fold_convert (ctype, c)),
5858 op1);
5860 break;
5862 case MULT_EXPR:
5863 /* We have a special case here if we are doing something like
5864 (C * 8) % 4 since we know that's zero. */
5865 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5866 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5867 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5868 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5869 return omit_one_operand (type, integer_zero_node, op0);
5871 /* ... fall through ... */
5873 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5874 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5875 /* If we can extract our operation from the LHS, do so and return a
5876 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5877 do something only if the second operand is a constant. */
5878 if (same_p
5879 && (t1 = extract_muldiv (op0, c, code, wide_type,
5880 strict_overflow_p)) != 0)
5881 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5882 fold_convert (ctype, op1));
5883 else if (tcode == MULT_EXPR && code == MULT_EXPR
5884 && (t1 = extract_muldiv (op1, c, code, wide_type,
5885 strict_overflow_p)) != 0)
5886 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5887 fold_convert (ctype, t1));
5888 else if (TREE_CODE (op1) != INTEGER_CST)
5889 return 0;
5891 /* If these are the same operation types, we can associate them
5892 assuming no overflow. */
5893 if (tcode == code
5894 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5895 fold_convert (ctype, c), 0))
5896 && !TREE_OVERFLOW (t1))
5897 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5899 /* If these operations "cancel" each other, we have the main
5900 optimizations of this pass, which occur when either constant is a
5901 multiple of the other, in which case we replace this with an
5902 operation of either CODE or TCODE.
5904 If we have an unsigned type that is not a sizetype, we cannot do
5905 this since it will change the result if the original computation
5906 overflowed. */
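/* For example (illustrative, assuming signed arithmetic where
   overflow is undefined): (X * 8) / 4 simplifies to X * 2 via the
   first branch below, and (X * 4) / 8 simplifies to X / 2 via the
   second; both set *STRICT_OVERFLOW_P.  */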
5907 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5908 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5909 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5910 || (tcode == MULT_EXPR
5911 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5912 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5914 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5916 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5917 *strict_overflow_p = true;
5918 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5919 fold_convert (ctype,
5920 const_binop (TRUNC_DIV_EXPR,
5921 op1, c, 0)));
5923 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5925 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5926 *strict_overflow_p = true;
5927 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5928 fold_convert (ctype,
5929 const_binop (TRUNC_DIV_EXPR,
5930 c, op1, 0)));
5933 break;
5935 default:
5936 break;
5939 return 0;
5942 /* Return a node which has the indicated constant VALUE (either 0 or
5943 1), and is of the indicated TYPE. */
5945 tree
5946 constant_boolean_node (int value, tree type)
5948 if (type == integer_type_node)
5949 return value ? integer_one_node : integer_zero_node;
5950 else if (type == boolean_type_node)
5951 return value ? boolean_true_node : boolean_false_node;
5952 else
5953 return build_int_cst (type, value);
5957 /* Return true if expr looks like an ARRAY_REF and set base and
5958 offset to the appropriate trees. If there is no offset,
5959 offset is set to NULL_TREE. Base will be canonicalized to
5960 something you can get the element type from using
5961 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5962 in bytes from the base. */
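/* Examples (illustrative, assuming 4-byte int): given
   "int a[10]; int *p;", &a[i] yields base a and offset i * 4,
   p + 4 yields base p and offset 4, and p alone yields base p
   with a NULL_TREE offset.  */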
5964 static bool
5965 extract_array_ref (tree expr, tree *base, tree *offset)
5967 /* One canonical form is a PLUS_EXPR with the first
5968 argument being an ADDR_EXPR with a possible NOP_EXPR
5969 attached. */
5970 if (TREE_CODE (expr) == PLUS_EXPR)
5972 tree op0 = TREE_OPERAND (expr, 0);
5973 tree inner_base, dummy1;
5974 /* Strip NOP_EXPRs here because the C frontends and/or
5975 folders may present us with (int *)&x.a + 4B. */
5976 STRIP_NOPS (op0);
5977 if (extract_array_ref (op0, &inner_base, &dummy1))
5979 *base = inner_base;
5980 if (dummy1 == NULL_TREE)
5981 *offset = TREE_OPERAND (expr, 1);
5982 else
5983 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5984 dummy1, TREE_OPERAND (expr, 1));
5985 return true;
5988 /* Another canonical form is an ADDR_EXPR of an ARRAY_REF,
5989 which we transform into an ADDR_EXPR with appropriate
5990 offset. For other arguments to the ADDR_EXPR we assume
5991 zero offset and as such do not care about the ADDR_EXPR
5992 type and strip possible nops from it. */
5993 else if (TREE_CODE (expr) == ADDR_EXPR)
5995 tree op0 = TREE_OPERAND (expr, 0);
5996 if (TREE_CODE (op0) == ARRAY_REF)
5998 tree idx = TREE_OPERAND (op0, 1);
5999 *base = TREE_OPERAND (op0, 0);
6000 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6001 array_ref_element_size (op0));
6003 else
6005 /* Handle array-to-pointer decay as &a. */
6006 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6007 *base = TREE_OPERAND (expr, 0);
6008 else
6009 *base = expr;
6010 *offset = NULL_TREE;
6012 return true;
6014 /* The next canonical form is a variable (SSA_VAR_P) with POINTER_TYPE. */
6015 else if (SSA_VAR_P (expr)
6016 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6018 *base = expr;
6019 *offset = NULL_TREE;
6020 return true;
6023 return false;
6027 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6028 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6029 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6030 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6031 COND is the first argument to CODE; otherwise (as in the example
6032 given here), it is the second argument. TYPE is the type of the
6033 original expression. Return NULL_TREE if no simplification is
6034 possible. */
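/* E.g. (illustrative): 4 + (x < y) becomes (x < y) ? 5 : 4, since
   each arm folds to a constant; a + (b ? x : y) with constant a
   becomes b ? (a + x) : (a + y) in the same way.  */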
6036 static tree
6037 fold_binary_op_with_conditional_arg (enum tree_code code,
6038 tree type, tree op0, tree op1,
6039 tree cond, tree arg, int cond_first_p)
6041 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6042 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6043 tree test, true_value, false_value;
6044 tree lhs = NULL_TREE;
6045 tree rhs = NULL_TREE;
6047 /* This transformation is only worthwhile if we don't have to wrap
6048 arg in a SAVE_EXPR, and the operation can be simplified on at least
6049 one of the branches once it is pushed inside the COND_EXPR. */
6050 if (!TREE_CONSTANT (arg))
6051 return NULL_TREE;
6053 if (TREE_CODE (cond) == COND_EXPR)
6055 test = TREE_OPERAND (cond, 0);
6056 true_value = TREE_OPERAND (cond, 1);
6057 false_value = TREE_OPERAND (cond, 2);
6058 /* If this operand throws an exception, then it does not make
6059 sense to try to perform a logical or arithmetic operation
6060 involving it. */
6061 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6062 lhs = true_value;
6063 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6064 rhs = false_value;
6066 else
6068 tree testtype = TREE_TYPE (cond);
6069 test = cond;
6070 true_value = constant_boolean_node (true, testtype);
6071 false_value = constant_boolean_node (false, testtype);
6074 arg = fold_convert (arg_type, arg);
6075 if (lhs == 0)
6077 true_value = fold_convert (cond_type, true_value);
6078 if (cond_first_p)
6079 lhs = fold_build2 (code, type, true_value, arg);
6080 else
6081 lhs = fold_build2 (code, type, arg, true_value);
6083 if (rhs == 0)
6085 false_value = fold_convert (cond_type, false_value);
6086 if (cond_first_p)
6087 rhs = fold_build2 (code, type, false_value, arg);
6088 else
6089 rhs = fold_build2 (code, type, arg, false_value);
6092 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6093 return fold_convert (type, test);
6097 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6099 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6100 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6101 ADDEND is the same as X.
6103 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6104 and finite. The problematic cases are when X is zero, and its mode
6105 has signed zeros. In the case of rounding towards -infinity,
6106 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6107 modes, X + 0 is not the same as X because -0 + 0 is 0. */
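/* Consequently (illustrative): X - 0.0 folds to X unless
   sign-dependent rounding must be honored, and X + (-0.0) is
   treated the same way; X + 0.0 is left alone whenever signed
   zeros matter, since it would turn -0.0 into 0.0.  */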
6109 static bool
6110 fold_real_zero_addition_p (tree type, tree addend, int negate)
6112 if (!real_zerop (addend))
6113 return false;
6115 /* Don't allow the fold with -fsignaling-nans. */
6116 if (HONOR_SNANS (TYPE_MODE (type)))
6117 return false;
6119 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6120 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6121 return true;
6123 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6124 if (TREE_CODE (addend) == REAL_CST
6125 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6126 negate = !negate;
6128 /* The mode has signed zeros, and we have to honor their sign.
6129 In this situation, there is only one case we can return true for.
6130 X - 0 is the same as X unless rounding towards -infinity is
6131 supported. */
6132 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6135 /* Subroutine of fold() that checks comparisons of built-in math
6136 functions against real constants.
6138 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6139 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6140 is the type of the result and ARG0 and ARG1 are the operands of the
6141 comparison. ARG1 must be a TREE_REAL_CST.
6143 The function returns the constant folded tree if a simplification
6144 can be made, and NULL_TREE otherwise. */
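/* Examples (illustrative): sqrt(x) > 2.0 folds to x > 4.0;
   sqrt(x) < 2.0 folds to x >= 0.0 && x < 4.0, or to just x < 4.0
   when NaNs need not be honored; sqrt(x) <= -1.0 folds to false.  */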
6146 static tree
6147 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6148 tree type, tree arg0, tree arg1)
6150 REAL_VALUE_TYPE c;
6152 if (BUILTIN_SQRT_P (fcode))
6154 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6155 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6157 c = TREE_REAL_CST (arg1);
6158 if (REAL_VALUE_NEGATIVE (c))
6160 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false if y is negative. */
6161 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6162 return omit_one_operand (type, integer_zero_node, arg);
6164 /* sqrt(x) > y is always true, if y is negative and we
6165 don't care about NaNs, i.e. negative values of x. */
6166 if (code == NE_EXPR || !HONOR_NANS (mode))
6167 return omit_one_operand (type, integer_one_node, arg);
6169 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6170 return fold_build2 (GE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg), dconst0));
6173 else if (code == GT_EXPR || code == GE_EXPR)
6175 REAL_VALUE_TYPE c2;
6177 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6178 real_convert (&c2, mode, &c2);
6180 if (REAL_VALUE_ISINF (c2))
6182 /* sqrt(x) > y is x == +Inf, when y is very large. */
6183 if (HONOR_INFINITIES (mode))
6184 return fold_build2 (EQ_EXPR, type, arg,
6185 build_real (TREE_TYPE (arg), c2));
6187 /* sqrt(x) > y is always false, when y is very large
6188 and we don't care about infinities. */
6189 return omit_one_operand (type, integer_zero_node, arg);
6192 /* sqrt(x) > c is the same as x > c*c. */
6193 return fold_build2 (code, type, arg,
6194 build_real (TREE_TYPE (arg), c2));
6196 else if (code == LT_EXPR || code == LE_EXPR)
6198 REAL_VALUE_TYPE c2;
6200 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6201 real_convert (&c2, mode, &c2);
6203 if (REAL_VALUE_ISINF (c2))
6205 /* sqrt(x) < y is always true, when y is a very large
6206 value and we don't care about NaNs or Infinities. */
6207 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6208 return omit_one_operand (type, integer_one_node, arg);
6210 /* sqrt(x) < y is x != +Inf when y is very large and we
6211 don't care about NaNs. */
6212 if (! HONOR_NANS (mode))
6213 return fold_build2 (NE_EXPR, type, arg,
6214 build_real (TREE_TYPE (arg), c2));
6216 /* sqrt(x) < y is x >= 0 when y is very large and we
6217 don't care about Infinities. */
6218 if (! HONOR_INFINITIES (mode))
6219 return fold_build2 (GE_EXPR, type, arg,
6220 build_real (TREE_TYPE (arg), dconst0));
6222 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6223 if (lang_hooks.decls.global_bindings_p () != 0
6224 || CONTAINS_PLACEHOLDER_P (arg))
6225 return NULL_TREE;
6227 arg = save_expr (arg);
6228 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6229 fold_build2 (GE_EXPR, type, arg,
6230 build_real (TREE_TYPE (arg),
6231 dconst0)),
6232 fold_build2 (NE_EXPR, type, arg,
6233 build_real (TREE_TYPE (arg),
6234 c2)));
6237 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6238 if (! HONOR_NANS (mode))
6239 return fold_build2 (code, type, arg,
6240 build_real (TREE_TYPE (arg), c2));
6242 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6243 if (lang_hooks.decls.global_bindings_p () == 0
6244 && ! CONTAINS_PLACEHOLDER_P (arg))
6246 arg = save_expr (arg);
6247 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6248 fold_build2 (GE_EXPR, type, arg,
6249 build_real (TREE_TYPE (arg),
6250 dconst0)),
6251 fold_build2 (code, type, arg,
6252 build_real (TREE_TYPE (arg),
6253 c2)));
6258 return NULL_TREE;
6261 /* Subroutine of fold() that optimizes comparisons against Infinities,
6262 either +Inf or -Inf.
6264 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6265 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6266 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6268 The function returns the constant folded tree if a simplification
6269 can be made, and NULL_TREE otherwise. */
6271 static tree
6272 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6274 enum machine_mode mode;
6275 REAL_VALUE_TYPE max;
6276 tree temp;
6277 bool neg;
6279 mode = TYPE_MODE (TREE_TYPE (arg0));
6281 /* For negative infinity swap the sense of the comparison. */
6282 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6283 if (neg)
6284 code = swap_tree_comparison (code);
6286 switch (code)
6288 case GT_EXPR:
6289 /* x > +Inf is always false, if we ignore sNaNs. */
6290 if (HONOR_SNANS (mode))
6291 return NULL_TREE;
6292 return omit_one_operand (type, integer_zero_node, arg0);
6294 case LE_EXPR:
6295 /* x <= +Inf is always true, if we don't care about NaNs. */
6296 if (! HONOR_NANS (mode))
6297 return omit_one_operand (type, integer_one_node, arg0);
6299 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6300 if (lang_hooks.decls.global_bindings_p () == 0
6301 && ! CONTAINS_PLACEHOLDER_P (arg0))
6303 arg0 = save_expr (arg0);
6304 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6306 break;
6308 case EQ_EXPR:
6309 case GE_EXPR:
6310 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6311 real_maxval (&max, neg, mode);
6312 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6313 arg0, build_real (TREE_TYPE (arg0), max));
6315 case LT_EXPR:
6316 /* x < +Inf is always equal to x <= DBL_MAX. */
6317 real_maxval (&max, neg, mode);
6318 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6319 arg0, build_real (TREE_TYPE (arg0), max));
6321 case NE_EXPR:
6322 /* x != +Inf is always equal to !(x > DBL_MAX). */
6323 real_maxval (&max, neg, mode);
6324 if (! HONOR_NANS (mode))
6325 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6326 arg0, build_real (TREE_TYPE (arg0), max));
6328 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6329 arg0, build_real (TREE_TYPE (arg0), max));
6330 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6332 default:
6333 break;
6336 return NULL_TREE;
6339 /* Subroutine of fold() that optimizes comparisons of a division by
6340 a nonzero integer constant against an integer constant, i.e.
6341 X/C1 op C2.
6343 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6344 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6345 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6347 The function returns the constant folded tree if a simplification
6348 can be made, and NULL_TREE otherwise. */
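/* Worked example (illustrative, signed X): for X / 4 == 5 we get
   prod = 20, lo = 20 and hi = 23, and return a range check for
   X in [20, 23]; for X / 4 > 5 we return X > 23.  */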
6350 static tree
6351 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6353 tree prod, tmp, hi, lo;
6354 tree arg00 = TREE_OPERAND (arg0, 0);
6355 tree arg01 = TREE_OPERAND (arg0, 1);
6356 unsigned HOST_WIDE_INT lpart;
6357 HOST_WIDE_INT hpart;
6358 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6359 bool neg_overflow;
6360 int overflow;
6362 /* We have to do this the hard way to detect unsigned overflow.
6363 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6364 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6365 TREE_INT_CST_HIGH (arg01),
6366 TREE_INT_CST_LOW (arg1),
6367 TREE_INT_CST_HIGH (arg1),
6368 &lpart, &hpart, unsigned_p);
6369 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6370 -1, overflow);
6371 neg_overflow = false;
6373 if (unsigned_p)
6375 tmp = int_const_binop (MINUS_EXPR, arg01,
6376 build_int_cst (TREE_TYPE (arg01), 1), 0);
6377 lo = prod;
6379 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6380 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6381 TREE_INT_CST_HIGH (prod),
6382 TREE_INT_CST_LOW (tmp),
6383 TREE_INT_CST_HIGH (tmp),
6384 &lpart, &hpart, unsigned_p);
6385 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6386 -1, overflow | TREE_OVERFLOW (prod));
6388 else if (tree_int_cst_sgn (arg01) >= 0)
6390 tmp = int_const_binop (MINUS_EXPR, arg01,
6391 build_int_cst (TREE_TYPE (arg01), 1), 0);
6392 switch (tree_int_cst_sgn (arg1))
6394 case -1:
6395 neg_overflow = true;
6396 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6397 hi = prod;
6398 break;
6400 case 0:
6401 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6402 hi = tmp;
6403 break;
6405 case 1:
6406 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6407 lo = prod;
6408 break;
6410 default:
6411 gcc_unreachable ();
6414 else
6416 /* A negative divisor reverses the relational operators. */
6417 code = swap_tree_comparison (code);
6419 tmp = int_const_binop (PLUS_EXPR, arg01,
6420 build_int_cst (TREE_TYPE (arg01), 1), 0);
6421 switch (tree_int_cst_sgn (arg1))
6423 case -1:
6424 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6425 lo = prod;
6426 break;
6428 case 0:
6429 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6430 lo = tmp;
6431 break;
6433 case 1:
6434 neg_overflow = true;
6435 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6436 hi = prod;
6437 break;
6439 default:
6440 gcc_unreachable ();
6444 switch (code)
6446 case EQ_EXPR:
6447 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6448 return omit_one_operand (type, integer_zero_node, arg00);
6449 if (TREE_OVERFLOW (hi))
6450 return fold_build2 (GE_EXPR, type, arg00, lo);
6451 if (TREE_OVERFLOW (lo))
6452 return fold_build2 (LE_EXPR, type, arg00, hi);
6453 return build_range_check (type, arg00, 1, lo, hi);
6455 case NE_EXPR:
6456 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6457 return omit_one_operand (type, integer_one_node, arg00);
6458 if (TREE_OVERFLOW (hi))
6459 return fold_build2 (LT_EXPR, type, arg00, lo);
6460 if (TREE_OVERFLOW (lo))
6461 return fold_build2 (GT_EXPR, type, arg00, hi);
6462 return build_range_check (type, arg00, 0, lo, hi);
6464 case LT_EXPR:
6465 if (TREE_OVERFLOW (lo))
6467 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6468 return omit_one_operand (type, tmp, arg00);
6470 return fold_build2 (LT_EXPR, type, arg00, lo);
6472 case LE_EXPR:
6473 if (TREE_OVERFLOW (hi))
6475 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6476 return omit_one_operand (type, tmp, arg00);
6478 return fold_build2 (LE_EXPR, type, arg00, hi);
6480 case GT_EXPR:
6481 if (TREE_OVERFLOW (hi))
6483 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6484 return omit_one_operand (type, tmp, arg00);
6486 return fold_build2 (GT_EXPR, type, arg00, hi);
6488 case GE_EXPR:
6489 if (TREE_OVERFLOW (lo))
6491 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6492 return omit_one_operand (type, tmp, arg00);
6494 return fold_build2 (GE_EXPR, type, arg00, lo);
6496 default:
6497 break;
6500 return NULL_TREE;
6504 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6505 equality/inequality test, then return a simplified form of the test
6506 using a sign test. Otherwise return NULL. TYPE is the desired
6507 result type. */
6509 static tree
6510 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6511 tree result_type)
6513 /* If this is testing a single bit, we can optimize the test. */
6514 if ((code == NE_EXPR || code == EQ_EXPR)
6515 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6516 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6518 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6519 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6520 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6522 if (arg00 != NULL_TREE
6523 /* This is only a win if casting to a signed type is cheap,
6524 i.e. when arg00's type is not a partial mode. */
6525 && TYPE_PRECISION (TREE_TYPE (arg00))
6526 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6528 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6529 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6530 result_type, fold_convert (stype, arg00),
6531 build_int_cst (stype, 0));
6535 return NULL_TREE;
6538 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6539 equality/inequality test, then return a simplified form of
6540 the test using shifts and logical operations. Otherwise return
6541 NULL. TYPE is the desired result type. */
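/* Examples (illustrative): (A & 4) != 0 becomes (A >> 2) & 1, and
   (A & 4) == 0 becomes ((A >> 2) ^ 1) & 1; a test of the sign bit,
   e.g. (A & 0x80000000) != 0 for 32-bit A, is instead folded to the
   sign test A < 0 by the subroutine above.  */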
6543 tree
6544 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6545 tree result_type)
6547 /* If this is testing a single bit, we can optimize the test. */
6548 if ((code == NE_EXPR || code == EQ_EXPR)
6549 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6550 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6552 tree inner = TREE_OPERAND (arg0, 0);
6553 tree type = TREE_TYPE (arg0);
6554 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6555 enum machine_mode operand_mode = TYPE_MODE (type);
6556 int ops_unsigned;
6557 tree signed_type, unsigned_type, intermediate_type;
6558 tree tem, one;
6560 /* First, see if we can fold the single bit test into a sign-bit
6561 test. */
6562 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6563 result_type);
6564 if (tem)
6565 return tem;
6567 /* Otherwise we have (A & C) != 0 where C is a single bit,
6568 convert that into ((A >> C2) & 1), where C2 = log2(C).
6569 Similarly for (A & C) == 0. */
6571 /* If INNER is a right shift of a constant and it plus BITNUM does
6572 not overflow, adjust BITNUM and INNER. */
6573 if (TREE_CODE (inner) == RSHIFT_EXPR
6574 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6575 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6576 && bitnum < TYPE_PRECISION (type)
6577 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6578 bitnum - TYPE_PRECISION (type)))
6580 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6581 inner = TREE_OPERAND (inner, 0);
6584 /* If we are going to be able to omit the AND below, we must do our
6585 operations as unsigned. If we must use the AND, we have a choice.
6586 Normally unsigned is faster, but for some machines signed is. */
6587 #ifdef LOAD_EXTEND_OP
6588 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6589 && !flag_syntax_only) ? 0 : 1;
6590 #else
6591 ops_unsigned = 1;
6592 #endif
6594 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6595 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6596 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6597 inner = fold_convert (intermediate_type, inner);
6599 if (bitnum != 0)
6600 inner = build2 (RSHIFT_EXPR, intermediate_type,
6601 inner, size_int (bitnum));
6603 one = build_int_cst (intermediate_type, 1);
6605 if (code == EQ_EXPR)
6606 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6608 /* Put the AND last so it can combine with more things. */
6609 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6611 /* Make sure to return the proper type. */
6612 inner = fold_convert (result_type, inner);
6614 return inner;
6616 return NULL_TREE;
6619 /* Check whether we are allowed to reorder operands arg0 and arg1,
6620 such that the evaluation of arg1 occurs before arg0. */
6622 static bool
6623 reorder_operands_p (tree arg0, tree arg1)
6625 if (! flag_evaluation_order)
6626 return true;
6627 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6628 return true;
6629 return ! TREE_SIDE_EFFECTS (arg0)
6630 && ! TREE_SIDE_EFFECTS (arg1);
6633 /* Test whether it is preferable to swap two operands, ARG0 and
6634 ARG1, for example because ARG0 is an integer constant and ARG1
6635 isn't. If REORDER is true, only recommend swapping if we can
6636 evaluate the operands in reverse order. */
6638 bool
6639 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6641 STRIP_SIGN_NOPS (arg0);
6642 STRIP_SIGN_NOPS (arg1);
6644 if (TREE_CODE (arg1) == INTEGER_CST)
6645 return 0;
6646 if (TREE_CODE (arg0) == INTEGER_CST)
6647 return 1;
6649 if (TREE_CODE (arg1) == REAL_CST)
6650 return 0;
6651 if (TREE_CODE (arg0) == REAL_CST)
6652 return 1;
6654 if (TREE_CODE (arg1) == COMPLEX_CST)
6655 return 0;
6656 if (TREE_CODE (arg0) == COMPLEX_CST)
6657 return 1;
6659 if (TREE_CONSTANT (arg1))
6660 return 0;
6661 if (TREE_CONSTANT (arg0))
6662 return 1;
6664 if (optimize_size)
6665 return 0;
6667 if (reorder && flag_evaluation_order
6668 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6669 return 0;
6671 if (DECL_P (arg1))
6672 return 0;
6673 if (DECL_P (arg0))
6674 return 1;
6676 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6677 for commutative and comparison operators. Ensuring a canonical
6678 form allows the optimizers to find additional redundancies without
6679 having to explicitly check for both orderings. */
6680 if (TREE_CODE (arg0) == SSA_NAME
6681 && TREE_CODE (arg1) == SSA_NAME
6682 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6683 return 1;
6685 return 0;
6688 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6689 ARG0 is extended to a wider type. */
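/* Example (illustrative): with "signed char sc;", (int) sc < 12 is
   expressed in the narrower type as sc < 12; with "unsigned char
   uc;", only equality tests such as (int) uc == 12 are narrowed,
   since the signedness changes.  */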
6691 static tree
6692 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6694 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6695 tree arg1_unw;
6696 tree shorter_type, outer_type;
6697 tree min, max;
6698 bool above, below;
6700 if (arg0_unw == arg0)
6701 return NULL_TREE;
6702 shorter_type = TREE_TYPE (arg0_unw);
6704 #ifdef HAVE_canonicalize_funcptr_for_compare
6705 /* Disable this optimization if we're casting a function pointer
6706 type on targets that require function pointer canonicalization. */
6707 if (HAVE_canonicalize_funcptr_for_compare
6708 && TREE_CODE (shorter_type) == POINTER_TYPE
6709 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6710 return NULL_TREE;
6711 #endif
6713 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6714 return NULL_TREE;
6716 arg1_unw = get_unwidened (arg1, shorter_type);
6718 /* If possible, express the comparison in the shorter mode. */
6719 if ((code == EQ_EXPR || code == NE_EXPR
6720 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6721 && (TREE_TYPE (arg1_unw) == shorter_type
6722 || (TREE_CODE (arg1_unw) == INTEGER_CST
6723 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6724 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6725 && int_fits_type_p (arg1_unw, shorter_type))))
6726 return fold_build2 (code, type, arg0_unw,
6727 fold_convert (shorter_type, arg1_unw));
6729 if (TREE_CODE (arg1_unw) != INTEGER_CST
6730 || TREE_CODE (shorter_type) != INTEGER_TYPE
6731 || !int_fits_type_p (arg1_unw, shorter_type))
6732 return NULL_TREE;
6734 /* If we are comparing with an integer that does not fit into the range
6735 of the shorter type, the result is known. */
6736 outer_type = TREE_TYPE (arg1_unw);
6737 min = lower_bound_in_type (outer_type, shorter_type);
6738 max = upper_bound_in_type (outer_type, shorter_type);
6740 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6741 max, arg1_unw));
6742 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6743 arg1_unw, min));
6745 switch (code)
6747 case EQ_EXPR:
6748 if (above || below)
6749 return omit_one_operand (type, integer_zero_node, arg0);
6750 break;
6752 case NE_EXPR:
6753 if (above || below)
6754 return omit_one_operand (type, integer_one_node, arg0);
6755 break;
6757 case LT_EXPR:
6758 case LE_EXPR:
6759 if (above)
6760 return omit_one_operand (type, integer_one_node, arg0);
6761 else if (below)
6762 return omit_one_operand (type, integer_zero_node, arg0);
6764 case GT_EXPR:
6765 case GE_EXPR:
6766 if (above)
6767 return omit_one_operand (type, integer_zero_node, arg0);
6768 else if (below)
6769 return omit_one_operand (type, integer_one_node, arg0);
6771 default:
6772 break;
6775 return NULL_TREE;
6778 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6779 ARG0 just the signedness is changed. */
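/* Example (illustrative): with "int x;", (unsigned) x == 5 folds to
   x == 5, since equality is unaffected by the sign change; but
   (unsigned) x < 5 is left alone, because an ordering comparison is
   not invariant under a change of signedness.  */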
6781 static tree
6782 fold_sign_changed_comparison (enum tree_code code, tree type,
6783 tree arg0, tree arg1)
6785 tree arg0_inner;
6786 tree inner_type, outer_type;
6788 if (TREE_CODE (arg0) != NOP_EXPR
6789 && TREE_CODE (arg0) != CONVERT_EXPR)
6790 return NULL_TREE;
6792 outer_type = TREE_TYPE (arg0);
6793 arg0_inner = TREE_OPERAND (arg0, 0);
6794 inner_type = TREE_TYPE (arg0_inner);
6796 #ifdef HAVE_canonicalize_funcptr_for_compare
6797 /* Disable this optimization if we're casting a function pointer
6798 type on targets that require function pointer canonicalization. */
6799 if (HAVE_canonicalize_funcptr_for_compare
6800 && TREE_CODE (inner_type) == POINTER_TYPE
6801 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6802 return NULL_TREE;
6803 #endif
6805 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6806 return NULL_TREE;
6808 if (TREE_CODE (arg1) != INTEGER_CST
6809 && !((TREE_CODE (arg1) == NOP_EXPR
6810 || TREE_CODE (arg1) == CONVERT_EXPR)
6811 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6812 return NULL_TREE;
6814 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6815 && code != NE_EXPR
6816 && code != EQ_EXPR)
6817 return NULL_TREE;
6819 if (TREE_CODE (arg1) == INTEGER_CST)
6820 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6821 TREE_INT_CST_HIGH (arg1), 0,
6822 TREE_OVERFLOW (arg1));
6823 else
6824 arg1 = fold_convert (inner_type, arg1);
6826 return fold_build2 (code, type, arg0_inner, arg1);
6829 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6830 the step of the array. Reconstructs s and delta in the case of s * delta
6831 being an integer constant (and thus already folded).
6832 ADDR is the address. OP1 is the multiplicative expression.
6833 If the function succeeds, the new address expression is returned. Otherwise
6834 NULL_TREE is returned. */
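/* Example (illustrative, assuming 4-byte int): for "int a[10];",
   &a[i] + delta * 4 becomes &a[i + delta], and &a[i] + 8 becomes
   &a[i + 2].  */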
6836 static tree
6837 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6839 tree s, delta, step;
6840 tree ref = TREE_OPERAND (addr, 0), pref;
6841 tree ret, pos;
6842 tree itype;
6843 bool mdim = false;
6845 /* Canonicalize op1 into a possibly non-constant delta
6846 and an INTEGER_CST s. */
6847 if (TREE_CODE (op1) == MULT_EXPR)
6849 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6851 STRIP_NOPS (arg0);
6852 STRIP_NOPS (arg1);
6854 if (TREE_CODE (arg0) == INTEGER_CST)
6856 s = arg0;
6857 delta = arg1;
6859 else if (TREE_CODE (arg1) == INTEGER_CST)
6861 s = arg1;
6862 delta = arg0;
6864 else
6865 return NULL_TREE;
6867 else if (TREE_CODE (op1) == INTEGER_CST)
6869 delta = op1;
6870 s = NULL_TREE;
6872 else
6874 /* Act as if op1 were delta * 1. */
6875 delta = op1;
6876 s = integer_one_node;
6879 for (;; ref = TREE_OPERAND (ref, 0))
6881 if (TREE_CODE (ref) == ARRAY_REF)
6883 /* Remember if this was a multi-dimensional array. */
6884 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6885 mdim = true;
6887 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6888 if (! itype)
6889 continue;
6891 step = array_ref_element_size (ref);
6892 if (TREE_CODE (step) != INTEGER_CST)
6893 continue;
6895 if (s)
6897 if (! tree_int_cst_equal (step, s))
6898 continue;
6900 else
6902 /* Check whether delta is a multiple of step. */
6903 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6904 if (! tmp)
6905 continue;
6906 delta = tmp;
6909 /* Only fold here if we can verify we do not overflow one
6910 dimension of a multi-dimensional array. */
6911 if (mdim)
6913 tree tmp;
6915 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6916 || !INTEGRAL_TYPE_P (itype)
6917 || !TYPE_MAX_VALUE (itype)
6918 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6919 continue;
6921 tmp = fold_binary (code, itype,
6922 fold_convert (itype,
6923 TREE_OPERAND (ref, 1)),
6924 fold_convert (itype, delta));
6925 if (!tmp
6926 || TREE_CODE (tmp) != INTEGER_CST
6927 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6928 continue;
6931 break;
6933 else
6934 mdim = false;
6936 if (!handled_component_p (ref))
6937 return NULL_TREE;
6940 /* We found a suitable array reference. Copy everything up to it,
6941 and replace the index. */
6943 pref = TREE_OPERAND (addr, 0);
6944 ret = copy_node (pref);
6945 pos = ret;
6947 while (pref != ref)
6949 pref = TREE_OPERAND (pref, 0);
6950 TREE_OPERAND (pos, 0) = copy_node (pref);
6951 pos = TREE_OPERAND (pos, 0);
6954 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6955 fold_convert (itype,
6956 TREE_OPERAND (pos, 1)),
6957 fold_convert (itype, delta));
6959 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6963 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6964 means A >= Y && A != MAX, but in this case we know that
6965 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6967 static tree
6968 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6970 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6972 if (TREE_CODE (bound) == LT_EXPR)
6973 a = TREE_OPERAND (bound, 0);
6974 else if (TREE_CODE (bound) == GT_EXPR)
6975 a = TREE_OPERAND (bound, 1);
6976 else
6977 return NULL_TREE;
6979 typea = TREE_TYPE (a);
6980 if (!INTEGRAL_TYPE_P (typea)
6981 && !POINTER_TYPE_P (typea))
6982 return NULL_TREE;
6984 if (TREE_CODE (ineq) == LT_EXPR)
6986 a1 = TREE_OPERAND (ineq, 1);
6987 y = TREE_OPERAND (ineq, 0);
6989 else if (TREE_CODE (ineq) == GT_EXPR)
6991 a1 = TREE_OPERAND (ineq, 0);
6992 y = TREE_OPERAND (ineq, 1);
6994 else
6995 return NULL_TREE;
6997 if (TREE_TYPE (a1) != typea)
6998 return NULL_TREE;
7000 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
7001 if (!integer_onep (diff))
7002 return NULL_TREE;
7004 return fold_build2 (GE_EXPR, type, a, y);
7007 /* Fold a sum or difference of at least one multiplication.
7008 Returns the folded tree or NULL if no simplification could be made. */
7010 static tree
7011 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7013 tree arg00, arg01, arg10, arg11;
7014 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7016 /* (A * C) +- (B * C) -> (A+-B) * C.
7017 (A * C) +- A -> A * (C+-1).
7018 We are most concerned about the case where C is a constant,
7019 but other combinations show up during loop reduction. Since
7020 it is not difficult, try all four possibilities. */
7022 if (TREE_CODE (arg0) == MULT_EXPR)
7024 arg00 = TREE_OPERAND (arg0, 0);
7025 arg01 = TREE_OPERAND (arg0, 1);
7027 else
7029 arg00 = arg0;
7030 arg01 = build_one_cst (type);
7032 if (TREE_CODE (arg1) == MULT_EXPR)
7034 arg10 = TREE_OPERAND (arg1, 0);
7035 arg11 = TREE_OPERAND (arg1, 1);
7037 else
7039 arg10 = arg1;
7040 arg11 = build_one_cst (type);
7042 same = NULL_TREE;
7044 if (operand_equal_p (arg01, arg11, 0))
7045 same = arg01, alt0 = arg00, alt1 = arg10;
7046 else if (operand_equal_p (arg00, arg10, 0))
7047 same = arg00, alt0 = arg01, alt1 = arg11;
7048 else if (operand_equal_p (arg00, arg11, 0))
7049 same = arg00, alt0 = arg01, alt1 = arg10;
7050 else if (operand_equal_p (arg01, arg10, 0))
7051 same = arg01, alt0 = arg00, alt1 = arg11;
7053 /* No identical multiplicands; see if we can find a common
7054 power-of-two factor in non-power-of-two multiplies. This
7055 can help in multi-dimensional array access. */
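/* E.g. (illustrative): A*12 + B*4 is rewritten as (A*3 + B) * 4.  */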
7056 else if (host_integerp (arg01, 0)
7057 && host_integerp (arg11, 0))
7059 HOST_WIDE_INT int01, int11, tmp;
7060 bool swap = false;
7061 tree maybe_same;
7062 int01 = TREE_INT_CST_LOW (arg01);
7063 int11 = TREE_INT_CST_LOW (arg11);
7065 /* Move min of absolute values to int11. */
7066 if ((int01 >= 0 ? int01 : -int01)
7067 < (int11 >= 0 ? int11 : -int11))
7069 tmp = int01, int01 = int11, int11 = tmp;
7070 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7071 maybe_same = arg01;
7072 swap = true;
7074 else
7075 maybe_same = arg11;
7077 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7079 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7080 build_int_cst (TREE_TYPE (arg00),
7081 int01 / int11));
7082 alt1 = arg10;
7083 same = maybe_same;
7084 if (swap)
7085 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7089 if (same)
7090 return fold_build2 (MULT_EXPR, type,
7091 fold_build2 (code, type,
7092 fold_convert (type, alt0),
7093 fold_convert (type, alt1)),
7094 fold_convert (type, same));
7096 return NULL_TREE;
7099 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7100 specified by EXPR into the buffer PTR of length LEN bytes.
7101 Return the number of bytes placed in the buffer, or zero
7102 upon failure. */
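/* Example (illustrative): encoding the 32-bit INTEGER_CST 0x01020304
   stores the bytes 01 02 03 04 on a big-endian target and
   04 03 02 01 on a little-endian one.  */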
7104 static int
7105 native_encode_int (tree expr, unsigned char *ptr, int len)
7107 tree type = TREE_TYPE (expr);
7108 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7109 int byte, offset, word, words;
7110 unsigned char value;
7112 if (total_bytes > len)
7113 return 0;
7114 words = total_bytes / UNITS_PER_WORD;
7116 for (byte = 0; byte < total_bytes; byte++)
7118 int bitpos = byte * BITS_PER_UNIT;
7119 if (bitpos < HOST_BITS_PER_WIDE_INT)
7120 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7121 else
7122 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7123 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7125 if (total_bytes > UNITS_PER_WORD)
7127 word = byte / UNITS_PER_WORD;
7128 if (WORDS_BIG_ENDIAN)
7129 word = (words - 1) - word;
7130 offset = word * UNITS_PER_WORD;
7131 if (BYTES_BIG_ENDIAN)
7132 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7133 else
7134 offset += byte % UNITS_PER_WORD;
7136 else
7137 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7138 ptr[offset] = value;
7140 return total_bytes;
7144 /* Subroutine of native_encode_expr. Encode the REAL_CST
7145 specified by EXPR into the buffer PTR of length LEN bytes.
7146 Return the number of bytes placed in the buffer, or zero
7147 upon failure. */
7149 static int
7150 native_encode_real (tree expr, unsigned char *ptr, int len)
7152 tree type = TREE_TYPE (expr);
7153 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7154 int byte, offset, word, words;
7155 unsigned char value;
7157 /* There are always 32 bits in each long, no matter the size of
7158 the host's long. We handle floating point representations with
7159 up to 192 bits. */
7160 long tmp[6];
7162 if (total_bytes > len)
7163 return 0;
7164 words = total_bytes / UNITS_PER_WORD;
7166 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7168 for (byte = 0; byte < total_bytes; byte++)
7170 int bitpos = byte * BITS_PER_UNIT;
7171 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7173 if (total_bytes > UNITS_PER_WORD)
7175 word = byte / UNITS_PER_WORD;
7176 if (FLOAT_WORDS_BIG_ENDIAN)
7177 word = (words - 1) - word;
7178 offset = word * UNITS_PER_WORD;
7179 if (BYTES_BIG_ENDIAN)
7180 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7181 else
7182 offset += byte % UNITS_PER_WORD;
7184 else
7185 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7186 ptr[offset] = value;
7188 return total_bytes;
7191 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7192 specified by EXPR into the buffer PTR of length LEN bytes.
7193 Return the number of bytes placed in the buffer, or zero
7194 upon failure. */
7196 static int
7197 native_encode_complex (tree expr, unsigned char *ptr, int len)
7199 int rsize, isize;
7200 tree part;
7202 part = TREE_REALPART (expr);
7203 rsize = native_encode_expr (part, ptr, len);
7204 if (rsize == 0)
7205 return 0;
7206 part = TREE_IMAGPART (expr);
7207 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7208 if (isize != rsize)
7209 return 0;
7210 return rsize + isize;
7214 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7215 specified by EXPR into the buffer PTR of length LEN bytes.
7216 Return the number of bytes placed in the buffer, or zero
7217 upon failure. */
7219 static int
7220 native_encode_vector (tree expr, unsigned char *ptr, int len)
7222 int i, size, offset, count;
7223 tree itype, elem, elements;
7225 offset = 0;
7226 elements = TREE_VECTOR_CST_ELTS (expr);
7227 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7228 itype = TREE_TYPE (TREE_TYPE (expr));
7229 size = GET_MODE_SIZE (TYPE_MODE (itype));
7230 for (i = 0; i < count; i++)
7232 if (elements)
7234 elem = TREE_VALUE (elements);
7235 elements = TREE_CHAIN (elements);
7237 else
7238 elem = NULL_TREE;
7240 if (elem)
7242 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7243 return 0;
7245 else
7247 if (offset + size > len)
7248 return 0;
7249 memset (ptr+offset, 0, size);
7251 offset += size;
7253 return offset;
7257 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7258 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7259 buffer PTR of length LEN bytes. Return the number of bytes
7260 placed in the buffer, or zero upon failure. */
7262 static int
7263 native_encode_expr (tree expr, unsigned char *ptr, int len)
7265 switch (TREE_CODE (expr))
7267 case INTEGER_CST:
7268 return native_encode_int (expr, ptr, len);
7270 case REAL_CST:
7271 return native_encode_real (expr, ptr, len);
7273 case COMPLEX_CST:
7274 return native_encode_complex (expr, ptr, len);
7276 case VECTOR_CST:
7277 return native_encode_vector (expr, ptr, len);
7279 default:
7280 return 0;
7285 /* Subroutine of native_interpret_expr. Interpret the contents of
7286 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7287 If the buffer cannot be interpreted, return NULL_TREE. */
7289 static tree
7290 native_interpret_int (tree type, unsigned char *ptr, int len)
7292 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7293 int byte, offset, word, words;
7294 unsigned char value;
7295 unsigned HOST_WIDE_INT lo = 0;
7296 HOST_WIDE_INT hi = 0;
7298 if (total_bytes > len)
7299 return NULL_TREE;
7300 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7301 return NULL_TREE;
7302 words = total_bytes / UNITS_PER_WORD;
7304 for (byte = 0; byte < total_bytes; byte++)
7306 int bitpos = byte * BITS_PER_UNIT;
7307 if (total_bytes > UNITS_PER_WORD)
7309 word = byte / UNITS_PER_WORD;
7310 if (WORDS_BIG_ENDIAN)
7311 word = (words - 1) - word;
7312 offset = word * UNITS_PER_WORD;
7313 if (BYTES_BIG_ENDIAN)
7314 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7315 else
7316 offset += byte % UNITS_PER_WORD;
7318 else
7319 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7320 value = ptr[offset];
7322 if (bitpos < HOST_BITS_PER_WIDE_INT)
7323 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7324 else
7325 hi |= (unsigned HOST_WIDE_INT) value
7326 << (bitpos - HOST_BITS_PER_WIDE_INT);
7329 return build_int_cst_wide_type (type, lo, hi);
7333 /* Subroutine of native_interpret_expr. Interpret the contents of
7334 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7335 If the buffer cannot be interpreted, return NULL_TREE. */
7337 static tree
7338 native_interpret_real (tree type, unsigned char *ptr, int len)
7340 enum machine_mode mode = TYPE_MODE (type);
7341 int total_bytes = GET_MODE_SIZE (mode);
7342 int byte, offset, word, words;
7343 unsigned char value;
7344 /* There are always 32 bits in each long, no matter the size of
7345 the host's long. We handle floating point representations with
7346 up to 192 bits. */
7347 REAL_VALUE_TYPE r;
7348 long tmp[6];
7351 if (total_bytes > len || total_bytes > 24)
7352 return NULL_TREE;
7353 words = total_bytes / UNITS_PER_WORD;
7355 memset (tmp, 0, sizeof (tmp));
7356 for (byte = 0; byte < total_bytes; byte++)
7358 int bitpos = byte * BITS_PER_UNIT;
7359 if (total_bytes > UNITS_PER_WORD)
7361 word = byte / UNITS_PER_WORD;
7362 if (FLOAT_WORDS_BIG_ENDIAN)
7363 word = (words - 1) - word;
7364 offset = word * UNITS_PER_WORD;
7365 if (BYTES_BIG_ENDIAN)
7366 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7367 else
7368 offset += byte % UNITS_PER_WORD;
7370 else
7371 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7372 value = ptr[offset];
7374 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7377 real_from_target (&r, tmp, mode);
7378 return build_real (type, r);
7382 /* Subroutine of native_interpret_expr. Interpret the contents of
7383 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7384 If the buffer cannot be interpreted, return NULL_TREE. */
7386 static tree
7387 native_interpret_complex (tree type, unsigned char *ptr, int len)
7389 tree etype, rpart, ipart;
7390 int size;
7392 etype = TREE_TYPE (type);
7393 size = GET_MODE_SIZE (TYPE_MODE (etype));
7394 if (size * 2 > len)
7395 return NULL_TREE;
7396 rpart = native_interpret_expr (etype, ptr, size);
7397 if (!rpart)
7398 return NULL_TREE;
7399 ipart = native_interpret_expr (etype, ptr+size, size);
7400 if (!ipart)
7401 return NULL_TREE;
7402 return build_complex (type, rpart, ipart);
7406 /* Subroutine of native_interpret_expr. Interpret the contents of
7407 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7408 If the buffer cannot be interpreted, return NULL_TREE. */
7410 static tree
7411 native_interpret_vector (tree type, unsigned char *ptr, int len)
7413 tree etype, elem, elements;
7414 int i, size, count;
7416 etype = TREE_TYPE (type);
7417 size = GET_MODE_SIZE (TYPE_MODE (etype));
7418 count = TYPE_VECTOR_SUBPARTS (type);
7419 if (size * count > len)
7420 return NULL_TREE;
7422 elements = NULL_TREE;
7423 for (i = count - 1; i >= 0; i--)
7425 elem = native_interpret_expr (etype, ptr+(i*size), size);
7426 if (!elem)
7427 return NULL_TREE;
7428 elements = tree_cons (NULL_TREE, elem, elements);
7430 return build_vector (type, elements);
7434 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7435 the buffer PTR of length LEN as a constant of type TYPE. For
7436 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7437 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7438 return NULL_TREE. */
7440 static tree
7441 native_interpret_expr (tree type, unsigned char *ptr, int len)
7443 switch (TREE_CODE (type))
7445 case INTEGER_TYPE:
7446 case ENUMERAL_TYPE:
7447 case BOOLEAN_TYPE:
7448 return native_interpret_int (type, ptr, len);
7450 case REAL_TYPE:
7451 return native_interpret_real (type, ptr, len);
7453 case COMPLEX_TYPE:
7454 return native_interpret_complex (type, ptr, len);
7456 case VECTOR_TYPE:
7457 return native_interpret_vector (type, ptr, len);
7459 default:
7460 return NULL_TREE;
7465 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7466 TYPE at compile-time. If we're unable to perform the conversion
7467 return NULL_TREE. */
7469 static tree
7470 fold_view_convert_expr (tree type, tree expr)
7472 /* We support up to 512-bit values (for V8DFmode). */
7473 unsigned char buffer[64];
7474 int len;
7476 /* Check that the host and target are sane. */
7477 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7478 return NULL_TREE;
7480 len = native_encode_expr (expr, buffer, sizeof (buffer));
7481 if (len == 0)
7482 return NULL_TREE;
7484 return native_interpret_expr (type, buffer, len);
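/* Editorial sketch (not GCC code): fold_view_convert_expr is the
   compile-time analogue of reinterpreting a constant's byte image in a
   different type.  A host-side illustration using memcpy follows, guarded
   with #if 0; it assumes an IEEE-754 host, which the real folder does
   not.  */
#if 0
#include <stdio.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  unsigned char buffer[64];	/* mirrors the 512-bit buffer above */
  uint32_t u;

  memcpy (buffer, &f, sizeof f);	/* like native_encode_expr */
  memcpy (&u, buffer, sizeof u);	/* like native_interpret_expr */
  printf ("%#x\n", (unsigned) u);	/* 0x3f800000 on IEEE hosts */
  return 0;
}
#endif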
7488 /* Fold a unary expression of code CODE and type TYPE with operand
7489 OP0. Return the folded expression if folding is successful.
7490 Otherwise, return NULL_TREE. */
7492 tree
7493 fold_unary (enum tree_code code, tree type, tree op0)
7495 tree tem;
7496 tree arg0;
7497 enum tree_code_class kind = TREE_CODE_CLASS (code);
7499 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7500 && TREE_CODE_LENGTH (code) == 1);
7502 arg0 = op0;
7503 if (arg0)
7505 if (code == NOP_EXPR || code == CONVERT_EXPR
7506 || code == FLOAT_EXPR || code == ABS_EXPR)
7508 /* Don't use STRIP_NOPS, because signedness of argument type
7509 matters. */
7510 STRIP_SIGN_NOPS (arg0);
7512 else
7514 /* Strip any conversions that don't change the mode. This
7515 is safe for every expression, except for a comparison
7516 expression because its signedness is derived from its
7517 operands.
7519 Note that this is done as an internal manipulation within
7520 the constant folder, in order to find the simplest
7521 representation of the arguments so that their form can be
7522	     studied.  In any case, the appropriate type conversions
7523 should be put back in the tree that will get out of the
7524 constant folder. */
7525 STRIP_NOPS (arg0);
7529 if (TREE_CODE_CLASS (code) == tcc_unary)
7531 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7532 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7533 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7534 else if (TREE_CODE (arg0) == COND_EXPR)
7536 tree arg01 = TREE_OPERAND (arg0, 1);
7537 tree arg02 = TREE_OPERAND (arg0, 2);
7538 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7539 arg01 = fold_build1 (code, type, arg01);
7540 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7541 arg02 = fold_build1 (code, type, arg02);
7542 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7543 arg01, arg02);
7545	  /* If this was a conversion, and all we did was to move it
7546	     inside the COND_EXPR, bring it back out.  But leave it if
7547 it is a conversion from integer to integer and the
7548 result precision is no wider than a word since such a
7549 conversion is cheap and may be optimized away by combine,
7550 while it couldn't if it were outside the COND_EXPR. Then return
7551 so we don't get into an infinite recursion loop taking the
7552 conversion out and then back in. */
7554 if ((code == NOP_EXPR || code == CONVERT_EXPR
7555 || code == NON_LVALUE_EXPR)
7556 && TREE_CODE (tem) == COND_EXPR
7557 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7558 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7559 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7560 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7561 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7562 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7563 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7564 && (INTEGRAL_TYPE_P
7565 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7566 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7567 || flag_syntax_only))
7568 tem = build1 (code, type,
7569 build3 (COND_EXPR,
7570 TREE_TYPE (TREE_OPERAND
7571 (TREE_OPERAND (tem, 1), 0)),
7572 TREE_OPERAND (tem, 0),
7573 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7574 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7575 return tem;
7577 else if (COMPARISON_CLASS_P (arg0))
7579 if (TREE_CODE (type) == BOOLEAN_TYPE)
7581 arg0 = copy_node (arg0);
7582 TREE_TYPE (arg0) = type;
7583 return arg0;
7585 else if (TREE_CODE (type) != INTEGER_TYPE)
7586 return fold_build3 (COND_EXPR, type, arg0,
7587 fold_build1 (code, type,
7588 integer_one_node),
7589 fold_build1 (code, type,
7590 integer_zero_node));
7594 switch (code)
7596 case NOP_EXPR:
7597 case FLOAT_EXPR:
7598 case CONVERT_EXPR:
7599 case FIX_TRUNC_EXPR:
7600 if (TREE_TYPE (op0) == type)
7601 return op0;
7603 /* If we have (type) (a CMP b) and type is an integral type, return
7604 new expression involving the new type. */
7605 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7606 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7607 TREE_OPERAND (op0, 1));
7609 /* Handle cases of two conversions in a row. */
7610 if (TREE_CODE (op0) == NOP_EXPR
7611 || TREE_CODE (op0) == CONVERT_EXPR)
7613 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7614 tree inter_type = TREE_TYPE (op0);
7615 int inside_int = INTEGRAL_TYPE_P (inside_type);
7616 int inside_ptr = POINTER_TYPE_P (inside_type);
7617 int inside_float = FLOAT_TYPE_P (inside_type);
7618 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7619 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7620 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7621 int inter_int = INTEGRAL_TYPE_P (inter_type);
7622 int inter_ptr = POINTER_TYPE_P (inter_type);
7623 int inter_float = FLOAT_TYPE_P (inter_type);
7624 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7625 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7626 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7627 int final_int = INTEGRAL_TYPE_P (type);
7628 int final_ptr = POINTER_TYPE_P (type);
7629 int final_float = FLOAT_TYPE_P (type);
7630 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7631 unsigned int final_prec = TYPE_PRECISION (type);
7632 int final_unsignedp = TYPE_UNSIGNED (type);
7634 /* In addition to the cases of two conversions in a row
7635 handled below, if we are converting something to its own
7636 type via an object of identical or wider precision, neither
7637 conversion is needed. */
7638 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7639 && (((inter_int || inter_ptr) && final_int)
7640 || (inter_float && final_float))
7641 && inter_prec >= final_prec)
7642 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7644 /* Likewise, if the intermediate and final types are either both
7645 float or both integer, we don't need the middle conversion if
7646 it is wider than the final type and doesn't change the signedness
7647 (for integers). Avoid this if the final type is a pointer
7648 since then we sometimes need the inner conversion. Likewise if
7649 the outer has a precision not equal to the size of its mode. */
7650 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7651 || (inter_float && inside_float)
7652 || (inter_vec && inside_vec))
7653 && inter_prec >= inside_prec
7654 && (inter_float || inter_vec
7655 || inter_unsignedp == inside_unsignedp)
7656 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7657 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7658 && ! final_ptr
7659 && (! final_vec || inter_prec == inside_prec))
7660 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7662 /* If we have a sign-extension of a zero-extended value, we can
7663 replace that by a single zero-extension. */
7664 if (inside_int && inter_int && final_int
7665 && inside_prec < inter_prec && inter_prec < final_prec
7666 && inside_unsignedp && !inter_unsignedp)
7667 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7669 /* Two conversions in a row are not needed unless:
7670 - some conversion is floating-point (overstrict for now), or
7671 - some conversion is a vector (overstrict for now), or
7672 - the intermediate type is narrower than both initial and
7673 final, or
7674 - the intermediate type and innermost type differ in signedness,
7675 and the outermost type is wider than the intermediate, or
7676 - the initial type is a pointer type and the precisions of the
7677 intermediate and final types differ, or
7678 - the final type is a pointer type and the precisions of the
7679	     initial and intermediate types differ, or
7680	     - the final type is a pointer type and the initial type is not, or
7681	     - the initial type is a pointer to an array and the final type
7682	       is not.  */
7683 if (! inside_float && ! inter_float && ! final_float
7684 && ! inside_vec && ! inter_vec && ! final_vec
7685 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7686 && ! (inside_int && inter_int
7687 && inter_unsignedp != inside_unsignedp
7688 && inter_prec < final_prec)
7689 && ((inter_unsignedp && inter_prec > inside_prec)
7690 == (final_unsignedp && final_prec > inter_prec))
7691 && ! (inside_ptr && inter_prec != final_prec)
7692 && ! (final_ptr && inside_prec != inter_prec)
7693 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7694 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7695 && final_ptr == inside_ptr
7696 && ! (inside_ptr
7697 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7698 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7699 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
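/* Editorial examples (not GCC code) of the two-conversions-in-a-row rules
   above, assuming LP64 sizes (32-bit int, 64-bit long, 8-bit char).
   Guarded with #if 0.  */
#if 0
/* Widening to long and narrowing straight back: the intermediate
   conversion is redundant, so (int)(long)x folds to x.  */
int keep_value (int x) { return (int)(long)x; }

/* A narrowing intermediate type changes the value, so neither cast in
   (int)(char)x may be removed.  */
int keep_truncation (int x) { return (int)(char)x; }
#endif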
7702 /* Handle (T *)&A.B.C for A being of type T and B and C
7703 living at offset zero. This occurs frequently in
7704 C++ upcasting and then accessing the base. */
7705 if (TREE_CODE (op0) == ADDR_EXPR
7706 && POINTER_TYPE_P (type)
7707 && handled_component_p (TREE_OPERAND (op0, 0)))
7709 HOST_WIDE_INT bitsize, bitpos;
7710 tree offset;
7711 enum machine_mode mode;
7712 int unsignedp, volatilep;
7713 tree base = TREE_OPERAND (op0, 0);
7714 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7715 &mode, &unsignedp, &volatilep, false);
7716 /* If the reference was to a (constant) zero offset, we can use
7717 the address of the base if it has the same base type
7718 as the result type. */
7719 if (! offset && bitpos == 0
7720 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7721 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7722 return fold_convert (type, build_fold_addr_expr (base));
7725 if ((TREE_CODE (op0) == MODIFY_EXPR
7726 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7727 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7728 /* Detect assigning a bitfield. */
7729 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7730 && DECL_BIT_FIELD
7731 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7733 /* Don't leave an assignment inside a conversion
7734 unless assigning a bitfield. */
7735 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7736 /* First do the assignment, then return converted constant. */
7737 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7738 TREE_NO_WARNING (tem) = 1;
7739 TREE_USED (tem) = 1;
7740 return tem;
7743 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7744	 constant (if x has signed type, the sign bit cannot be set
7745 in c). This folds extension into the BIT_AND_EXPR. */
7746 if (INTEGRAL_TYPE_P (type)
7747 && TREE_CODE (type) != BOOLEAN_TYPE
7748 && TREE_CODE (op0) == BIT_AND_EXPR
7749 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7751 tree and = op0;
7752 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7753 int change = 0;
7755 if (TYPE_UNSIGNED (TREE_TYPE (and))
7756 || (TYPE_PRECISION (type)
7757 <= TYPE_PRECISION (TREE_TYPE (and))))
7758 change = 1;
7759 else if (TYPE_PRECISION (TREE_TYPE (and1))
7760 <= HOST_BITS_PER_WIDE_INT
7761 && host_integerp (and1, 1))
7763 unsigned HOST_WIDE_INT cst;
7765 cst = tree_low_cst (and1, 1);
7766 cst &= (HOST_WIDE_INT) -1
7767 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7768 change = (cst == 0);
7769 #ifdef LOAD_EXTEND_OP
7770 if (change
7771 && !flag_syntax_only
7772 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7773 == ZERO_EXTEND))
7775 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7776 and0 = fold_convert (uns, and0);
7777 and1 = fold_convert (uns, and1);
7779 #endif
7781 if (change)
7783 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7784 TREE_INT_CST_HIGH (and1), 0,
7785 TREE_OVERFLOW (and1));
7786 return fold_build2 (BIT_AND_EXPR, type,
7787 fold_convert (type, and0), tem);
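/* Editorial example (not GCC code) of the fold above: widening a masked
   value pushes the conversion into the BIT_AND_EXPR.  Guarded with #if 0;
   the concrete types are illustrative assumptions.  */
#if 0
unsigned long
widen_masked (unsigned int x)
{
  /* (unsigned long)(x & 0xff) folds to (unsigned long)x & 0xffUL; the
     mask already clears every bit the widening could affect.  */
  return (unsigned long)(x & 0xff);
}
#endif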
7791 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7792 T2 being pointers to types of the same size. */
7793 if (POINTER_TYPE_P (type)
7794 && BINARY_CLASS_P (arg0)
7795 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7796 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7798 tree arg00 = TREE_OPERAND (arg0, 0);
7799 tree t0 = type;
7800 tree t1 = TREE_TYPE (arg00);
7801 tree tt0 = TREE_TYPE (t0);
7802 tree tt1 = TREE_TYPE (t1);
7803 tree s0 = TYPE_SIZE (tt0);
7804 tree s1 = TYPE_SIZE (tt1);
7806 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7807 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7808 TREE_OPERAND (arg0, 1));
7811 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7812	 of the same precision, and X is an integer type not narrower than
7813 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7814 if (INTEGRAL_TYPE_P (type)
7815 && TREE_CODE (op0) == BIT_NOT_EXPR
7816 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7817 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7818 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7819 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7821 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7822 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7823 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7824 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7827 tem = fold_convert_const (code, type, arg0);
7828 return tem ? tem : NULL_TREE;
7830 case VIEW_CONVERT_EXPR:
7831 if (TREE_TYPE (op0) == type)
7832 return op0;
7833 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7834 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7835 return fold_view_convert_expr (type, op0);
7837 case NEGATE_EXPR:
7838 tem = fold_negate_expr (arg0);
7839 if (tem)
7840 return fold_convert (type, tem);
7841 return NULL_TREE;
7843 case ABS_EXPR:
7844 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7845 return fold_abs_const (arg0, type);
7846 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7847 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7848 /* Convert fabs((double)float) into (double)fabsf(float). */
7849 else if (TREE_CODE (arg0) == NOP_EXPR
7850 && TREE_CODE (type) == REAL_TYPE)
7852 tree targ0 = strip_float_extensions (arg0);
7853 if (targ0 != arg0)
7854 return fold_convert (type, fold_build1 (ABS_EXPR,
7855 TREE_TYPE (targ0),
7856 targ0));
7858 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7859 else if (TREE_CODE (arg0) == ABS_EXPR)
7860 return arg0;
7861 else if (tree_expr_nonnegative_p (arg0))
7862 return arg0;
7864 /* Strip sign ops from argument. */
7865 if (TREE_CODE (type) == REAL_TYPE)
7867 tem = fold_strip_sign_ops (arg0);
7868 if (tem)
7869 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7871 return NULL_TREE;
7873 case CONJ_EXPR:
7874 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7875 return fold_convert (type, arg0);
7876 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7878 tree itype = TREE_TYPE (type);
7879 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7880 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7881 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7883 if (TREE_CODE (arg0) == COMPLEX_CST)
7885 tree itype = TREE_TYPE (type);
7886 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7887 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7888 return build_complex (type, rpart, negate_expr (ipart));
7890 if (TREE_CODE (arg0) == CONJ_EXPR)
7891 return fold_convert (type, TREE_OPERAND (arg0, 0));
7892 return NULL_TREE;
7894 case BIT_NOT_EXPR:
7895 if (TREE_CODE (arg0) == INTEGER_CST)
7896 return fold_not_const (arg0, type);
7897 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7898 return TREE_OPERAND (arg0, 0);
7899 /* Convert ~ (-A) to A - 1. */
7900 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7901 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7902 build_int_cst (type, 1));
7903 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7904 else if (INTEGRAL_TYPE_P (type)
7905 && ((TREE_CODE (arg0) == MINUS_EXPR
7906 && integer_onep (TREE_OPERAND (arg0, 1)))
7907 || (TREE_CODE (arg0) == PLUS_EXPR
7908 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7909 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7910 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7911 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7912 && (tem = fold_unary (BIT_NOT_EXPR, type,
7913 fold_convert (type,
7914 TREE_OPERAND (arg0, 0)))))
7915 return fold_build2 (BIT_XOR_EXPR, type, tem,
7916 fold_convert (type, TREE_OPERAND (arg0, 1)));
7917 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7918 && (tem = fold_unary (BIT_NOT_EXPR, type,
7919 fold_convert (type,
7920 TREE_OPERAND (arg0, 1)))))
7921 return fold_build2 (BIT_XOR_EXPR, type,
7922 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7924 return NULL_TREE;
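/* Editorial check (not GCC code): the BIT_NOT_EXPR rewrites above follow
   from the two's-complement identity ~a == -a - 1.  Guarded with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 42;
  assert (~-a == a - 1);	/* ~(-A) -> A - 1 */
  assert (~(a - 1) == -a);	/* ~(A - 1) -> -A */
  return 0;
}
#endif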
7926 case TRUTH_NOT_EXPR:
7927 /* The argument to invert_truthvalue must have Boolean type. */
7928 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7929 arg0 = fold_convert (boolean_type_node, arg0);
7931 /* Note that the operand of this must be an int
7932 and its values must be 0 or 1.
7933 ("true" is a fixed value perhaps depending on the language,
7934 but we don't handle values other than 1 correctly yet.) */
7935 tem = fold_truth_not_expr (arg0);
7936 if (!tem)
7937 return NULL_TREE;
7938 return fold_convert (type, tem);
7940 case REALPART_EXPR:
7941 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7942 return fold_convert (type, arg0);
7943 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7944 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7945 TREE_OPERAND (arg0, 1));
7946 if (TREE_CODE (arg0) == COMPLEX_CST)
7947 return fold_convert (type, TREE_REALPART (arg0));
7948 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7950 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7951 tem = fold_build2 (TREE_CODE (arg0), itype,
7952 fold_build1 (REALPART_EXPR, itype,
7953 TREE_OPERAND (arg0, 0)),
7954 fold_build1 (REALPART_EXPR, itype,
7955 TREE_OPERAND (arg0, 1)));
7956 return fold_convert (type, tem);
7958 if (TREE_CODE (arg0) == CONJ_EXPR)
7960 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7961 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7962 return fold_convert (type, tem);
7964 if (TREE_CODE (arg0) == CALL_EXPR)
7966 tree fn = get_callee_fndecl (arg0);
7967 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7968 switch (DECL_FUNCTION_CODE (fn))
7970 CASE_FLT_FN (BUILT_IN_CEXPI):
7971 fn = mathfn_built_in (type, BUILT_IN_COS);
7972 if (fn)
7973 return build_function_call_expr (fn,
7974 TREE_OPERAND (arg0, 1));
7975 break;
7977 default:
7978 break;
7981 return NULL_TREE;
7983 case IMAGPART_EXPR:
7984 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7985 return fold_convert (type, integer_zero_node);
7986 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7987 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7988 TREE_OPERAND (arg0, 0));
7989 if (TREE_CODE (arg0) == COMPLEX_CST)
7990 return fold_convert (type, TREE_IMAGPART (arg0));
7991 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7993 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7994 tem = fold_build2 (TREE_CODE (arg0), itype,
7995 fold_build1 (IMAGPART_EXPR, itype,
7996 TREE_OPERAND (arg0, 0)),
7997 fold_build1 (IMAGPART_EXPR, itype,
7998 TREE_OPERAND (arg0, 1)));
7999 return fold_convert (type, tem);
8001 if (TREE_CODE (arg0) == CONJ_EXPR)
8003 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8004 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8005 return fold_convert (type, negate_expr (tem));
8007 if (TREE_CODE (arg0) == CALL_EXPR)
8009 tree fn = get_callee_fndecl (arg0);
8010 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8011 switch (DECL_FUNCTION_CODE (fn))
8013 CASE_FLT_FN (BUILT_IN_CEXPI):
8014 fn = mathfn_built_in (type, BUILT_IN_SIN);
8015 if (fn)
8016 return build_function_call_expr (fn,
8017 TREE_OPERAND (arg0, 1));
8018 break;
8020 default:
8021 break;
8024 return NULL_TREE;
8026 default:
8027 return NULL_TREE;
8028 } /* switch (code) */
8031 /* Fold a binary expression of code CODE and type TYPE with operands
8032 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8033 Return the folded expression if folding is successful. Otherwise,
8034 return NULL_TREE. */
8036 static tree
8037 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8039 enum tree_code compl_code;
8041 if (code == MIN_EXPR)
8042 compl_code = MAX_EXPR;
8043 else if (code == MAX_EXPR)
8044 compl_code = MIN_EXPR;
8045 else
8046 gcc_unreachable ();
8048 /* MIN (MAX (a, b), b) == b. */
8049 if (TREE_CODE (op0) == compl_code
8050 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8051 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8053 /* MIN (MAX (b, a), b) == b. */
8054 if (TREE_CODE (op0) == compl_code
8055 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8056 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8057 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8059 /* MIN (a, MAX (a, b)) == a. */
8060 if (TREE_CODE (op1) == compl_code
8061 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8062 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8063 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8065 /* MIN (a, MAX (b, a)) == a. */
8066 if (TREE_CODE (op1) == compl_code
8067 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8068 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8069 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8071 return NULL_TREE;
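/* Editorial check (not GCC code) of the absorption identities folded by
   fold_minmax above; each holds for any total order.  Guarded with #if 0.  */
#if 0
#include <assert.h>

#define MIN(x, y) ((x) < (y) ? (x) : (y))
#define MAX(x, y) ((x) > (y) ? (x) : (y))

int
main (void)
{
  int a, b;
  for (a = -2; a <= 2; a++)
    for (b = -2; b <= 2; b++)
      {
	assert (MIN (MAX (a, b), b) == b);	/* MIN (MAX (a, b), b) == b */
	assert (MIN (a, MAX (a, b)) == a);	/* MIN (a, MAX (a, b)) == a */
      }
  return 0;
}
#endif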
8074 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8075 by changing CODE to reduce the magnitude of constants involved in
8076 ARG0 of the comparison.
8077 Returns a canonicalized comparison tree if a simplification was
8078 possible, otherwise returns NULL_TREE.
8079 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8080   valid when signed overflow is undefined.  */
8082 static tree
8083 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8084 tree arg0, tree arg1,
8085 bool *strict_overflow_p)
8087 enum tree_code code0 = TREE_CODE (arg0);
8088 tree t, cst0 = NULL_TREE;
8089 int sgn0;
8090 bool swap = false;
8092 /* Match A +- CST code arg1 and CST code arg1. */
8093 if (!(((code0 == MINUS_EXPR
8094 || code0 == PLUS_EXPR)
8095 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8096 || code0 == INTEGER_CST))
8097 return NULL_TREE;
8099 /* Identify the constant in arg0 and its sign. */
8100 if (code0 == INTEGER_CST)
8101 cst0 = arg0;
8102 else
8103 cst0 = TREE_OPERAND (arg0, 1);
8104 sgn0 = tree_int_cst_sgn (cst0);
8106 /* Overflowed constants and zero will cause problems. */
8107 if (integer_zerop (cst0)
8108 || TREE_OVERFLOW (cst0))
8109 return NULL_TREE;
8111 /* See if we can reduce the magnitude of the constant in
8112 arg0 by changing the comparison code. */
8113 if (code0 == INTEGER_CST)
8115 /* CST <= arg1 -> CST-1 < arg1. */
8116 if (code == LE_EXPR && sgn0 == 1)
8117 code = LT_EXPR;
8118 /* -CST < arg1 -> -CST-1 <= arg1. */
8119 else if (code == LT_EXPR && sgn0 == -1)
8120 code = LE_EXPR;
8121 /* CST > arg1 -> CST-1 >= arg1. */
8122 else if (code == GT_EXPR && sgn0 == 1)
8123 code = GE_EXPR;
8124 /* -CST >= arg1 -> -CST-1 > arg1. */
8125 else if (code == GE_EXPR && sgn0 == -1)
8126 code = GT_EXPR;
8127 else
8128 return NULL_TREE;
8129 /* arg1 code' CST' might be more canonical. */
8130 swap = true;
8132 else
8134 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8135 if (code == LT_EXPR
8136 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8137 code = LE_EXPR;
8138 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8139 else if (code == GT_EXPR
8140 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8141 code = GE_EXPR;
8142 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8143 else if (code == LE_EXPR
8144 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8145 code = LT_EXPR;
8146 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8147 else if (code == GE_EXPR
8148 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8149 code = GT_EXPR;
8150 else
8151 return NULL_TREE;
8152 *strict_overflow_p = true;
8155 /* Now build the constant reduced in magnitude. */
8156 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8157 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8158 if (code0 != INTEGER_CST)
8159 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8161   /* If swapping might yield a more canonical form, do so.  */
8162 if (swap)
8163 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8164 else
8165 return fold_build2 (code, type, t, arg1);
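/* Editorial examples (not GCC code) of the magnitude reductions above;
   both rewrites are exact for integers, though the A +- CST form is only
   applied when signed overflow is undefined.  Guarded with #if 0.  */
#if 0
/* CST <= arg1 -> CST-1 < arg1, then swapped: 5 <= x becomes x > 4.  */
int cst_side (int x) { return 5 <= x; }

/* A + CST > arg1 -> A + CST-1 >= arg1: x + 5 > 7 becomes x + 4 >= 7.  */
int var_side (int x) { return x + 5 > 7; }
#endif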
8168 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8169 overflow further. Try to decrease the magnitude of constants involved
8170 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8171 and put sole constants at the second argument position.
8172 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8174 static tree
8175 maybe_canonicalize_comparison (enum tree_code code, tree type,
8176 tree arg0, tree arg1)
8178 tree t;
8179 bool strict_overflow_p;
8180 const char * const warnmsg = G_("assuming signed overflow does not occur "
8181 "when reducing constant in comparison");
8183 /* In principle pointers also have undefined overflow behavior,
8184 but that causes problems elsewhere. */
8185 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8186 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8187 return NULL_TREE;
8189 /* Try canonicalization by simplifying arg0. */
8190 strict_overflow_p = false;
8191 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8192 &strict_overflow_p);
8193 if (t)
8195 if (strict_overflow_p)
8196 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8197 return t;
8200 /* Try canonicalization by simplifying arg1 using the swapped
8201 comparison. */
8202 code = swap_tree_comparison (code);
8203 strict_overflow_p = false;
8204 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8205 &strict_overflow_p);
8206 if (t && strict_overflow_p)
8207 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8208 return t;
8211 /* Subroutine of fold_binary. This routine performs all of the
8212 transformations that are common to the equality/inequality
8213 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8214 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8215   fold_binary should call fold_binary rather than this routine
8215   directly.  Fold a comparison with
8216 tree code CODE and type TYPE with operands OP0 and OP1. Return
8217 the folded comparison or NULL_TREE. */
8219 static tree
8220 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8222 tree arg0, arg1, tem;
8224 arg0 = op0;
8225 arg1 = op1;
8227 STRIP_SIGN_NOPS (arg0);
8228 STRIP_SIGN_NOPS (arg1);
8230 tem = fold_relational_const (code, type, arg0, arg1);
8231 if (tem != NULL_TREE)
8232 return tem;
8234 /* If one arg is a real or integer constant, put it last. */
8235 if (tree_swap_operands_p (arg0, arg1, true))
8236 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8238 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8239 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8240 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8241 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8242 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8243 && (TREE_CODE (arg1) == INTEGER_CST
8244 && !TREE_OVERFLOW (arg1)))
8246 tree const1 = TREE_OPERAND (arg0, 1);
8247 tree const2 = arg1;
8248 tree variable = TREE_OPERAND (arg0, 0);
8249 tree lhs;
8250 int lhs_add;
8251 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8253 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8254 TREE_TYPE (arg1), const2, const1);
8256 /* If the constant operation overflowed this can be
8257 simplified as a comparison against INT_MAX/INT_MIN. */
8258 if (TREE_CODE (lhs) == INTEGER_CST
8259 && TREE_OVERFLOW (lhs))
8261 int const1_sgn = tree_int_cst_sgn (const1);
8262 enum tree_code code2 = code;
8264 /* Get the sign of the constant on the lhs if the
8265 operation were VARIABLE + CONST1. */
8266 if (TREE_CODE (arg0) == MINUS_EXPR)
8267 const1_sgn = -const1_sgn;
8269 /* The sign of the constant determines if we overflowed
8270 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8271 Canonicalize to the INT_MIN overflow by swapping the comparison
8272 if necessary. */
8273 if (const1_sgn == -1)
8274 code2 = swap_tree_comparison (code);
8276 /* We now can look at the canonicalized case
8277 VARIABLE + 1 CODE2 INT_MIN
8278 and decide on the result. */
8279 if (code2 == LT_EXPR
8280 || code2 == LE_EXPR
8281 || code2 == EQ_EXPR)
8282 return omit_one_operand (type, boolean_false_node, variable);
8283 else if (code2 == NE_EXPR
8284 || code2 == GE_EXPR
8285 || code2 == GT_EXPR)
8286 return omit_one_operand (type, boolean_true_node, variable);
8289 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8290 && (TREE_CODE (lhs) != INTEGER_CST
8291 || !TREE_OVERFLOW (lhs)))
8293 fold_overflow_warning (("assuming signed overflow does not occur "
8294 "when changing X +- C1 cmp C2 to "
8295 "X cmp C1 +- C2"),
8296 WARN_STRICT_OVERFLOW_COMPARISON);
8297 return fold_build2 (code, type, variable, lhs);
8301   /* For comparisons of pointers we can decompose them into a compile-time
8302 comparison of the base objects and the offsets into the object.
8303 This requires at least one operand being an ADDR_EXPR to do more
8304 than the operand_equal_p test below. */
8305 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8306 && (TREE_CODE (arg0) == ADDR_EXPR
8307 || TREE_CODE (arg1) == ADDR_EXPR))
8309 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8310 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8311 enum machine_mode mode;
8312 int volatilep, unsignedp;
8313 bool indirect_base0 = false;
8315 /* Get base and offset for the access. Strip ADDR_EXPR for
8316 get_inner_reference, but put it back by stripping INDIRECT_REF
8317 off the base object if possible. */
8318 base0 = arg0;
8319 if (TREE_CODE (arg0) == ADDR_EXPR)
8321 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8322 &bitsize, &bitpos0, &offset0, &mode,
8323 &unsignedp, &volatilep, false);
8324 if (TREE_CODE (base0) == INDIRECT_REF)
8325 base0 = TREE_OPERAND (base0, 0);
8326 else
8327 indirect_base0 = true;
8330 base1 = arg1;
8331 if (TREE_CODE (arg1) == ADDR_EXPR)
8333 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8334 &bitsize, &bitpos1, &offset1, &mode,
8335 &unsignedp, &volatilep, false);
8336 /* We have to make sure to have an indirect/non-indirect base1
8337 just the same as we did for base0. */
8338 if (TREE_CODE (base1) == INDIRECT_REF
8339 && !indirect_base0)
8340 base1 = TREE_OPERAND (base1, 0);
8341 else if (!indirect_base0)
8342 base1 = NULL_TREE;
8344 else if (indirect_base0)
8345 base1 = NULL_TREE;
8347 /* If we have equivalent bases we might be able to simplify. */
8348 if (base0 && base1
8349 && operand_equal_p (base0, base1, 0))
8351 /* We can fold this expression to a constant if the non-constant
8352 offset parts are equal. */
8353 if (offset0 == offset1
8354 || (offset0 && offset1
8355 && operand_equal_p (offset0, offset1, 0)))
8357 switch (code)
8359 case EQ_EXPR:
8360 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8361 case NE_EXPR:
8362 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8363 case LT_EXPR:
8364 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8365 case LE_EXPR:
8366 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8367 case GE_EXPR:
8368 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8369 case GT_EXPR:
8370 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8371 default:;
8374 /* We can simplify the comparison to a comparison of the variable
8375 offset parts if the constant offset parts are equal.
8376 Be careful to use signed size type here because otherwise we
8377 mess with array offsets in the wrong way. This is possible
8378	     because pointer arithmetic is restricted to remain within an
8379 object and overflow on pointer differences is undefined as of
8380 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8381 else if (bitpos0 == bitpos1)
8383 tree signed_size_type_node;
8384 signed_size_type_node = signed_type_for (size_type_node);
8386 /* By converting to signed size type we cover middle-end pointer
8387 arithmetic which operates on unsigned pointer types of size
8388 type size and ARRAY_REF offsets which are properly sign or
8389 zero extended from their type in case it is narrower than
8390 size type. */
8391 if (offset0 == NULL_TREE)
8392 offset0 = build_int_cst (signed_size_type_node, 0);
8393 else
8394 offset0 = fold_convert (signed_size_type_node, offset0);
8395 if (offset1 == NULL_TREE)
8396 offset1 = build_int_cst (signed_size_type_node, 0);
8397 else
8398 offset1 = fold_convert (signed_size_type_node, offset1);
8400 return fold_build2 (code, type, offset0, offset1);
8405 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8406 same object, then we can fold this to a comparison of the two offsets in
8407 signed size type. This is possible because pointer arithmetic is
8408     restricted to remain within an object and overflow on pointer differences
8409 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8411     We check flag_wrapv directly because pointer types are unsigned,
8412 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8413 normally what we want to avoid certain odd overflow cases, but
8414 not here. */
8415 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8416 && !flag_wrapv
8417 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8419 tree base0, offset0, base1, offset1;
8421 if (extract_array_ref (arg0, &base0, &offset0)
8422 && extract_array_ref (arg1, &base1, &offset1)
8423 && operand_equal_p (base0, base1, 0))
8425 tree signed_size_type_node;
8426 signed_size_type_node = signed_type_for (size_type_node);
8428 /* By converting to signed size type we cover middle-end pointer
8429 arithmetic which operates on unsigned pointer types of size
8430 type size and ARRAY_REF offsets which are properly sign or
8431 zero extended from their type in case it is narrower than
8432 size type. */
8433 if (offset0 == NULL_TREE)
8434 offset0 = build_int_cst (signed_size_type_node, 0);
8435 else
8436 offset0 = fold_convert (signed_size_type_node, offset0);
8437 if (offset1 == NULL_TREE)
8438 offset1 = build_int_cst (signed_size_type_node, 0);
8439 else
8440 offset1 = fold_convert (signed_size_type_node, offset1);
8442 return fold_build2 (code, type, offset0, offset1);
8446 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8447 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8448 the resulting offset is smaller in absolute value than the
8449 original one. */
8450 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8451 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8452 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8453 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8454 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8455 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8456 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8458 tree const1 = TREE_OPERAND (arg0, 1);
8459 tree const2 = TREE_OPERAND (arg1, 1);
8460 tree variable1 = TREE_OPERAND (arg0, 0);
8461 tree variable2 = TREE_OPERAND (arg1, 0);
8462 tree cst;
8463 const char * const warnmsg = G_("assuming signed overflow does not "
8464 "occur when combining constants around "
8465 "a comparison");
8467 /* Put the constant on the side where it doesn't overflow and is
8468 of lower absolute value than before. */
8469 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8470 ? MINUS_EXPR : PLUS_EXPR,
8471 const2, const1, 0);
8472 if (!TREE_OVERFLOW (cst)
8473 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8475 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8476 return fold_build2 (code, type,
8477 variable1,
8478 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8479 variable2, cst));
8482 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8483 ? MINUS_EXPR : PLUS_EXPR,
8484 const1, const2, 0);
8485 if (!TREE_OVERFLOW (cst)
8486 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8488 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8489 return fold_build2 (code, type,
8490 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8491 variable1, cst),
8492 variable2);
8496 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8497 signed arithmetic case. That form is created by the compiler
8498 often enough for folding it to be of value. One example is in
8499 computing loop trip counts after Operator Strength Reduction. */
8500 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8501 && TREE_CODE (arg0) == MULT_EXPR
8502 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8503 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8504 && integer_zerop (arg1))
8506 tree const1 = TREE_OPERAND (arg0, 1);
8507 tree const2 = arg1; /* zero */
8508 tree variable1 = TREE_OPERAND (arg0, 0);
8509 enum tree_code cmp_code = code;
8511 gcc_assert (!integer_zerop (const1));
8513 fold_overflow_warning (("assuming signed overflow does not occur when "
8514 "eliminating multiplication in comparison "
8515 "with zero"),
8516 WARN_STRICT_OVERFLOW_COMPARISON);
8518 /* If const1 is negative we swap the sense of the comparison. */
8519 if (tree_int_cst_sgn (const1) < 0)
8520 cmp_code = swap_tree_comparison (cmp_code);
8522 return fold_build2 (cmp_code, type, variable1, const2);
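/* Editorial examples (not GCC code) of eliminating the multiplication in
   a comparison against zero; valid only because signed overflow is
   assumed not to occur.  Guarded with #if 0.  */
#if 0
int pos_scale (int x) { return x * 4 < 0; }	/* folds to x < 0 */
int neg_scale (int x) { return x * -4 < 0; }	/* sense swaps: x > 0 */
#endif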
8525 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8526 if (tem)
8527 return tem;
8529 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8531 tree targ0 = strip_float_extensions (arg0);
8532 tree targ1 = strip_float_extensions (arg1);
8533 tree newtype = TREE_TYPE (targ0);
8535 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8536 newtype = TREE_TYPE (targ1);
8538 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8539 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8540 return fold_build2 (code, type, fold_convert (newtype, targ0),
8541 fold_convert (newtype, targ1));
8543 /* (-a) CMP (-b) -> b CMP a */
8544 if (TREE_CODE (arg0) == NEGATE_EXPR
8545 && TREE_CODE (arg1) == NEGATE_EXPR)
8546 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8547 TREE_OPERAND (arg0, 0));
8549 if (TREE_CODE (arg1) == REAL_CST)
8551 REAL_VALUE_TYPE cst;
8552 cst = TREE_REAL_CST (arg1);
8554 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8555 if (TREE_CODE (arg0) == NEGATE_EXPR)
8556 return fold_build2 (swap_tree_comparison (code), type,
8557 TREE_OPERAND (arg0, 0),
8558 build_real (TREE_TYPE (arg1),
8559 REAL_VALUE_NEGATE (cst)));
8561 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8562 /* a CMP (-0) -> a CMP 0 */
8563 if (REAL_VALUE_MINUS_ZERO (cst))
8564 return fold_build2 (code, type, arg0,
8565 build_real (TREE_TYPE (arg1), dconst0));
8567 /* x != NaN is always true, other ops are always false. */
8568 if (REAL_VALUE_ISNAN (cst)
8569 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8571 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8572 return omit_one_operand (type, tem, arg0);
8575 /* Fold comparisons against infinity. */
8576 if (REAL_VALUE_ISINF (cst))
8578 tem = fold_inf_compare (code, type, arg0, arg1);
8579 if (tem != NULL_TREE)
8580 return tem;
8584 /* If this is a comparison of a real constant with a PLUS_EXPR
8585 or a MINUS_EXPR of a real constant, we can convert it into a
8586 comparison with a revised real constant as long as no overflow
8587 occurs when unsafe_math_optimizations are enabled. */
8588 if (flag_unsafe_math_optimizations
8589 && TREE_CODE (arg1) == REAL_CST
8590 && (TREE_CODE (arg0) == PLUS_EXPR
8591 || TREE_CODE (arg0) == MINUS_EXPR)
8592 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8593 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8594 ? MINUS_EXPR : PLUS_EXPR,
8595 arg1, TREE_OPERAND (arg0, 1), 0))
8596 && !TREE_OVERFLOW (tem))
8597 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8599 /* Likewise, we can simplify a comparison of a real constant with
8600 a MINUS_EXPR whose first operand is also a real constant, i.e.
8601 (c1 - x) < c2 becomes x > c1-c2. */
8602 if (flag_unsafe_math_optimizations
8603 && TREE_CODE (arg1) == REAL_CST
8604 && TREE_CODE (arg0) == MINUS_EXPR
8605 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8606 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8607 arg1, 0))
8608 && !TREE_OVERFLOW (tem))
8609 return fold_build2 (swap_tree_comparison (code), type,
8610 TREE_OPERAND (arg0, 1), tem);
8612 /* Fold comparisons against built-in math functions. */
8613 if (TREE_CODE (arg1) == REAL_CST
8614 && flag_unsafe_math_optimizations
8615 && ! flag_errno_math)
8617 enum built_in_function fcode = builtin_mathfn_code (arg0);
8619 if (fcode != END_BUILTINS)
8621 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8622 if (tem != NULL_TREE)
8623 return tem;
8628 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8629 if (TREE_CONSTANT (arg1)
8630 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8631 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8632 /* This optimization is invalid for ordered comparisons
8633 if CONST+INCR overflows or if foo+incr might overflow.
8634 This optimization is invalid for floating point due to rounding.
8635 For pointer types we assume overflow doesn't happen. */
8636 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8637 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8638 && (code == EQ_EXPR || code == NE_EXPR))))
8640 tree varop, newconst;
8642 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8644 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8645 arg1, TREE_OPERAND (arg0, 1));
8646 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8647 TREE_OPERAND (arg0, 0),
8648 TREE_OPERAND (arg0, 1));
8650 else
8652 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8653 arg1, TREE_OPERAND (arg0, 1));
8654 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8655 TREE_OPERAND (arg0, 0),
8656 TREE_OPERAND (arg0, 1));
8660 /* If VAROP is a reference to a bitfield, we must mask
8661 the constant by the width of the field. */
8662 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8663 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8664 && host_integerp (DECL_SIZE (TREE_OPERAND
8665 (TREE_OPERAND (varop, 0), 1)), 1))
8667 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8668 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8669 tree folded_compare, shift;
8671 /* First check whether the comparison would come out
8672 always the same. If we don't do that we would
8673 change the meaning with the masking. */
8674 folded_compare = fold_build2 (code, type,
8675 TREE_OPERAND (varop, 0), arg1);
8676 if (TREE_CODE (folded_compare) == INTEGER_CST)
8677 return omit_one_operand (type, folded_compare, varop);
8679 shift = build_int_cst (NULL_TREE,
8680 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8681 shift = fold_convert (TREE_TYPE (varop), shift);
8682 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8683 newconst, shift);
8684 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8685 newconst, shift);
8688 return fold_build2 (code, type, varop, newconst);
8691 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8692 && (TREE_CODE (arg0) == NOP_EXPR
8693 || TREE_CODE (arg0) == CONVERT_EXPR))
8695 /* If we are widening one operand of an integer comparison,
8696 see if the other operand is similarly being widened. Perhaps we
8697 can do the comparison in the narrower type. */
8698 tem = fold_widened_comparison (code, type, arg0, arg1);
8699 if (tem)
8700 return tem;
8702 /* Or if we are changing signedness. */
8703 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8704 if (tem)
8705 return tem;
8708 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8709 constant, we can simplify it. */
8710 if (TREE_CODE (arg1) == INTEGER_CST
8711 && (TREE_CODE (arg0) == MIN_EXPR
8712 || TREE_CODE (arg0) == MAX_EXPR)
8713 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8715 tem = optimize_minmax_comparison (code, type, op0, op1);
8716 if (tem)
8717 return tem;
8720 /* Simplify comparison of something with itself. (For IEEE
8721 floating-point, we can only do some of these simplifications.) */
8722 if (operand_equal_p (arg0, arg1, 0))
8724 switch (code)
8726 case EQ_EXPR:
8727 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8728 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8729 return constant_boolean_node (1, type);
8730 break;
8732 case GE_EXPR:
8733 case LE_EXPR:
8734 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8735 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8736 return constant_boolean_node (1, type);
8737 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8739 case NE_EXPR:
8740	/* For NE, we can only do this simplification if the type is integer
8741	   or we don't honor IEEE floating point NaNs.  */
8742 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8743 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8744 break;
8745 /* ... fall through ... */
8746 case GT_EXPR:
8747 case LT_EXPR:
8748 return constant_boolean_node (0, type);
8749 default:
8750 gcc_unreachable ();
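/* Editorial check (not GCC code): the FLOAT_TYPE_P guards above exist
   because IEEE NaNs do not compare equal to themselves, so x == x cannot
   be folded to true for floats that may be NaN.  Guarded with #if 0.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double d = NAN;
  assert (!(d == d));	/* EQ_EXPR must not fold to 1 */
  assert (d != d);	/* NE_EXPR must not fold to 0 */
  return 0;
}
#endif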
8754 /* If we are comparing an expression that just has comparisons
8755 of two integer values, arithmetic expressions of those comparisons,
8756 and constants, we can simplify it. There are only three cases
8757 to check: the two values can either be equal, the first can be
8758 greater, or the second can be greater. Fold the expression for
8759 those three values. Since each value must be 0 or 1, we have
8760 eight possibilities, each of which corresponds to the constant 0
8761 or 1 or one of the six possible comparisons.
8763 This handles common cases like (a > b) == 0 but also handles
8764 expressions like ((x > y) - (y > x)) > 0, which supposedly
8765 occur in macroized code. */
8767 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8769 tree cval1 = 0, cval2 = 0;
8770 int save_p = 0;
8772 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8773 /* Don't handle degenerate cases here; they should already
8774 have been handled anyway. */
8775 && cval1 != 0 && cval2 != 0
8776 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8777 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8778 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8779 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8780 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8781 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8782 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8784 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8785 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8787 /* We can't just pass T to eval_subst in case cval1 or cval2
8788 was the same as ARG1. */
8790 tree high_result
8791 = fold_build2 (code, type,
8792 eval_subst (arg0, cval1, maxval,
8793 cval2, minval),
8794 arg1);
8795 tree equal_result
8796 = fold_build2 (code, type,
8797 eval_subst (arg0, cval1, maxval,
8798 cval2, maxval),
8799 arg1);
8800 tree low_result
8801 = fold_build2 (code, type,
8802 eval_subst (arg0, cval1, minval,
8803 cval2, maxval),
8804 arg1);
8806 /* All three of these results should be 0 or 1. Confirm they are.
8807 Then use those values to select the proper code to use. */
8809 if (TREE_CODE (high_result) == INTEGER_CST
8810 && TREE_CODE (equal_result) == INTEGER_CST
8811 && TREE_CODE (low_result) == INTEGER_CST)
8813 /* Make a 3-bit mask with the high-order bit being the
8814	     value for `>', the next for `=', and the low for `<'.  */
8815 switch ((integer_onep (high_result) * 4)
8816 + (integer_onep (equal_result) * 2)
8817 + integer_onep (low_result))
8819 case 0:
8820 /* Always false. */
8821 return omit_one_operand (type, integer_zero_node, arg0);
8822 case 1:
8823 code = LT_EXPR;
8824 break;
8825 case 2:
8826 code = EQ_EXPR;
8827 break;
8828 case 3:
8829 code = LE_EXPR;
8830 break;
8831 case 4:
8832 code = GT_EXPR;
8833 break;
8834 case 5:
8835 code = NE_EXPR;
8836 break;
8837 case 6:
8838 code = GE_EXPR;
8839 break;
8840 case 7:
8841 /* Always true. */
8842 return omit_one_operand (type, integer_one_node, arg0);
8845 if (save_p)
8846 return save_expr (build2 (code, type, cval1, cval2));
8847 return fold_build2 (code, type, cval1, cval2);
8852 /* Fold a comparison of the address of COMPONENT_REFs with the same
8853 type and component to a comparison of the address of the base
8854 object. In short, &x->a OP &y->a to x OP y and
8855 &x->a OP &y.a to x OP &y */
8856 if (TREE_CODE (arg0) == ADDR_EXPR
8857 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8858 && TREE_CODE (arg1) == ADDR_EXPR
8859 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8861 tree cref0 = TREE_OPERAND (arg0, 0);
8862 tree cref1 = TREE_OPERAND (arg1, 0);
8863 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8865 tree op0 = TREE_OPERAND (cref0, 0);
8866 tree op1 = TREE_OPERAND (cref1, 0);
8867 return fold_build2 (code, type,
8868 build_fold_addr_expr (op0),
8869 build_fold_addr_expr (op1));
8873 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8874 into a single range test. */
8875 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8876 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8877 && TREE_CODE (arg1) == INTEGER_CST
8878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8879 && !integer_zerop (TREE_OPERAND (arg0, 1))
8880 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8881 && !TREE_OVERFLOW (arg1))
8883 tem = fold_div_compare (code, type, arg0, arg1);
8884 if (tem != NULL_TREE)
8885 return tem;
8888 /* Fold ~X op ~Y as Y op X. */
8889 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8890 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8891 return fold_build2 (code, type,
8892 TREE_OPERAND (arg1, 0),
8893 TREE_OPERAND (arg0, 0));
8895 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8896 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8897 && TREE_CODE (arg1) == INTEGER_CST)
8898 return fold_build2 (swap_tree_comparison (code), type,
8899 TREE_OPERAND (arg0, 0),
8900 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8902 return NULL_TREE;
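/* Editorial check (not GCC code) of the final two rewrites above:
   ~X op ~Y is Y op X, and ~X op C is X op' ~C with op' the swapped
   comparison; both follow from ~a == -a - 1.  Guarded with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x = 3, y = 10;
  assert ((~x < ~y) == (y < x));	/* ~X < ~Y  <->  Y < X  */
  assert ((~x < 5) == (x > ~5));	/* ~X < C   <->  X > ~C */
  return 0;
}
#endif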
8906 /* Subroutine of fold_binary. Optimize complex multiplications of the
8907 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8908 argument EXPR represents the expression "z" of type TYPE. */
8910 static tree
8911 fold_mult_zconjz (tree type, tree expr)
8913 tree itype = TREE_TYPE (type);
8914 tree rpart, ipart, tem;
8916 if (TREE_CODE (expr) == COMPLEX_EXPR)
8918 rpart = TREE_OPERAND (expr, 0);
8919 ipart = TREE_OPERAND (expr, 1);
8921 else if (TREE_CODE (expr) == COMPLEX_CST)
8923 rpart = TREE_REALPART (expr);
8924 ipart = TREE_IMAGPART (expr);
8926 else
8928 expr = save_expr (expr);
8929 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8930 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8933 rpart = save_expr (rpart);
8934 ipart = save_expr (ipart);
8935 tem = fold_build2 (PLUS_EXPR, itype,
8936 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8937 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8938 return fold_build2 (COMPLEX_EXPR, type, tem,
8939 fold_convert (itype, integer_zero_node));
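/* Editorial check (not GCC code) of the identity behind fold_mult_zconjz:
   z * conj(z) == re(z)^2 + im(z)^2 with a zero imaginary part.  Exact for
   the small values used here.  Guarded with #if 0.  */
#if 0
#include <assert.h>
#include <complex.h>

int
main (void)
{
  double complex z = 3.0 + 4.0 * I;
  double complex p = z * conj (z);
  assert (creal (p) == 25.0);	/* 3*3 + 4*4 */
  assert (cimag (p) == 0.0);
  return 0;
}
#endif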
8943 /* Fold a binary expression of code CODE and type TYPE with operands
8944 OP0 and OP1. Return the folded expression if folding is
8945 successful. Otherwise, return NULL_TREE. */
8947 tree
8948 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8950 enum tree_code_class kind = TREE_CODE_CLASS (code);
8951 tree arg0, arg1, tem;
8952 tree t1 = NULL_TREE;
8953 bool strict_overflow_p;
8955 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8956 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8957 && TREE_CODE_LENGTH (code) == 2
8958 && op0 != NULL_TREE
8959 && op1 != NULL_TREE);
8961 arg0 = op0;
8962 arg1 = op1;
8964 /* Strip any conversions that don't change the mode. This is
8965 safe for every expression, except for a comparison expression
8966 because its signedness is derived from its operands. So, in
8967 the latter case, only strip conversions that don't change the
8968 signedness.
8970 Note that this is done as an internal manipulation within the
8971 constant folder, in order to find the simplest representation
8972 of the arguments so that their form can be studied. In any
8973     case, the appropriate type conversions should be put back in
8974 the tree that will get out of the constant folder. */
8976 if (kind == tcc_comparison)
8978 STRIP_SIGN_NOPS (arg0);
8979 STRIP_SIGN_NOPS (arg1);
8981 else
8983 STRIP_NOPS (arg0);
8984 STRIP_NOPS (arg1);
8987 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8988 constant but we can't do arithmetic on them. */
8989 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8990 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8991 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8992 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8994 if (kind == tcc_binary)
8995 tem = const_binop (code, arg0, arg1, 0);
8996 else if (kind == tcc_comparison)
8997 tem = fold_relational_const (code, type, arg0, arg1);
8998 else
8999 tem = NULL_TREE;
9001 if (tem != NULL_TREE)
9003 if (TREE_TYPE (tem) != type)
9004 tem = fold_convert (type, tem);
9005 return tem;
9009 /* If this is a commutative operation, and ARG0 is a constant, move it
9010 to ARG1 to reduce the number of tests below. */
9011 if (commutative_tree_code (code)
9012 && tree_swap_operands_p (arg0, arg1, true))
9013 return fold_build2 (code, type, op1, op0);
9015 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9017 First check for cases where an arithmetic operation is applied to a
9018 compound, conditional, or comparison operation. Push the arithmetic
9019 operation inside the compound or conditional to see if any folding
9020 can then be done. Convert comparison to conditional for this purpose.
9021     This also optimizes non-constant cases that used to be done in
9022 expand_expr.
9024 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9025 one of the operands is a comparison and the other is a comparison, a
9026 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9027 code below would make the expression more complex. Change it to a
9028 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9029 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9031 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9032 || code == EQ_EXPR || code == NE_EXPR)
9033 && ((truth_value_p (TREE_CODE (arg0))
9034 && (truth_value_p (TREE_CODE (arg1))
9035 || (TREE_CODE (arg1) == BIT_AND_EXPR
9036 && integer_onep (TREE_OPERAND (arg1, 1)))))
9037 || (truth_value_p (TREE_CODE (arg1))
9038 && (truth_value_p (TREE_CODE (arg0))
9039 || (TREE_CODE (arg0) == BIT_AND_EXPR
9040 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9042 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9043 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9044 : TRUTH_XOR_EXPR,
9045 boolean_type_node,
9046 fold_convert (boolean_type_node, arg0),
9047 fold_convert (boolean_type_node, arg1));
9049 if (code == EQ_EXPR)
9050 tem = invert_truthvalue (tem);
9052 return fold_convert (type, tem);
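      /* For instance, (a < b) & (c < d) is rewritten as a TRUTH_AND_EXPR of
	 the two comparisons, and (a < b) == (c < d) becomes the inversion of
	 the corresponding TRUTH_XOR_EXPR.  */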
9055 if (TREE_CODE_CLASS (code) == tcc_binary
9056 || TREE_CODE_CLASS (code) == tcc_comparison)
9058 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9059 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9060 fold_build2 (code, type,
9061 TREE_OPERAND (arg0, 1), op1));
9062 if (TREE_CODE (arg1) == COMPOUND_EXPR
9063 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9064 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9065 fold_build2 (code, type,
9066 op0, TREE_OPERAND (arg1, 1)));
9068 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9070 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9071 arg0, arg1,
9072 /*cond_first_p=*/1);
9073 if (tem != NULL_TREE)
9074 return tem;
9077 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9079 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9080 arg1, arg0,
9081 /*cond_first_p=*/0);
9082 if (tem != NULL_TREE)
9083 return tem;
9087 switch (code)
9089 case PLUS_EXPR:
9090 /* A + (-B) -> A - B */
9091 if (TREE_CODE (arg1) == NEGATE_EXPR)
9092 return fold_build2 (MINUS_EXPR, type,
9093 fold_convert (type, arg0),
9094 fold_convert (type, TREE_OPERAND (arg1, 0)));
9095 /* (-A) + B -> B - A */
9096 if (TREE_CODE (arg0) == NEGATE_EXPR
9097 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9098 return fold_build2 (MINUS_EXPR, type,
9099 fold_convert (type, arg1),
9100 fold_convert (type, TREE_OPERAND (arg0, 0)));
9101 /* Convert ~A + 1 to -A. */
9102 if (INTEGRAL_TYPE_P (type)
9103 && TREE_CODE (arg0) == BIT_NOT_EXPR
9104 && integer_onep (arg1))
9105 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
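      /* In two's complement arithmetic -a == ~a + 1, so a source expression
	 such as ~a + 1 folds directly to -a.  */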
9107 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9108 same, or one of them equal to 1. */
9109 if ((TREE_CODE (arg0) == MULT_EXPR
9110 || TREE_CODE (arg1) == MULT_EXPR)
9111 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9113 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9114 if (tem)
9115 return tem;
9118 if (! FLOAT_TYPE_P (type))
9120 if (integer_zerop (arg1))
9121 return non_lvalue (fold_convert (type, arg0));
9123 /* ~X + X is -1. */
9124 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9125 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9126 && !TYPE_OVERFLOW_TRAPS (type))
9128 t1 = build_int_cst_type (type, -1);
9129 return omit_one_operand (type, t1, arg1);
9132 /* X + ~X is -1. */
9133 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9134 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9135 && !TYPE_OVERFLOW_TRAPS (type))
9137 t1 = build_int_cst_type (type, -1);
9138 return omit_one_operand (type, t1, arg0);
9141 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9142 with a constant, and the two constants have no bits in common,
9143 we should treat this as a BIT_IOR_EXPR since this may produce more
9144 simplifications. */
9145 if (TREE_CODE (arg0) == BIT_AND_EXPR
9146 && TREE_CODE (arg1) == BIT_AND_EXPR
9147 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9148 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9149 && integer_zerop (const_binop (BIT_AND_EXPR,
9150 TREE_OPERAND (arg0, 1),
9151 TREE_OPERAND (arg1, 1), 0)))
9153 code = BIT_IOR_EXPR;
9154 goto bit_ior;
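	  /* For example, (x & 0xF0) + (y & 0x0F) can never carry because the
	     masks have no bits in common, so it is folded exactly as if it
	     had been written (x & 0xF0) | (y & 0x0F).  */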
9157 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9158 (plus (plus (mult) (mult)) (foo)) so that we can
9159 take advantage of the factoring cases below. */
9160 if (((TREE_CODE (arg0) == PLUS_EXPR
9161 || TREE_CODE (arg0) == MINUS_EXPR)
9162 && TREE_CODE (arg1) == MULT_EXPR)
9163 || ((TREE_CODE (arg1) == PLUS_EXPR
9164 || TREE_CODE (arg1) == MINUS_EXPR)
9165 && TREE_CODE (arg0) == MULT_EXPR))
9167 tree parg0, parg1, parg, marg;
9168 enum tree_code pcode;
9170 if (TREE_CODE (arg1) == MULT_EXPR)
9171 parg = arg0, marg = arg1;
9172 else
9173 parg = arg1, marg = arg0;
9174 pcode = TREE_CODE (parg);
9175 parg0 = TREE_OPERAND (parg, 0);
9176 parg1 = TREE_OPERAND (parg, 1);
9177 STRIP_NOPS (parg0);
9178 STRIP_NOPS (parg1);
9180 if (TREE_CODE (parg0) == MULT_EXPR
9181 && TREE_CODE (parg1) != MULT_EXPR)
9182 return fold_build2 (pcode, type,
9183 fold_build2 (PLUS_EXPR, type,
9184 fold_convert (type, parg0),
9185 fold_convert (type, marg)),
9186 fold_convert (type, parg1));
9187 if (TREE_CODE (parg0) != MULT_EXPR
9188 && TREE_CODE (parg1) == MULT_EXPR)
9189 return fold_build2 (PLUS_EXPR, type,
9190 fold_convert (type, parg0),
9191 fold_build2 (pcode, type,
9192 fold_convert (type, marg),
9193 fold_convert (type,
9194 parg1)));
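	  /* For example, (x * 4 + y) + x * 2 is reassociated as
	     (x * 4 + x * 2) + y, after which fold_plusminus_mult_expr can
	     combine the products into x * 6 + y.  */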
9197 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
9198 of the array. The loop optimizer sometimes produces this type of
9199 expression. */
9200 if (TREE_CODE (arg0) == ADDR_EXPR)
9202 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
9203 if (tem)
9204 return fold_convert (type, tem);
9206 else if (TREE_CODE (arg1) == ADDR_EXPR)
9208 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
9209 if (tem)
9210 return fold_convert (type, tem);
9213 else
9215 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9216 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9217 return non_lvalue (fold_convert (type, arg0));
9219 /* Likewise if the operands are reversed. */
9220 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9221 return non_lvalue (fold_convert (type, arg1));
9223 /* Convert X + -C into X - C. */
9224 if (TREE_CODE (arg1) == REAL_CST
9225 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9227 tem = fold_negate_const (arg1, type);
9228 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9229 return fold_build2 (MINUS_EXPR, type,
9230 fold_convert (type, arg0),
9231 fold_convert (type, tem));
9234 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9235 to __complex__ ( x, y ). This is not the same for SNaNs or
9236 if signed zeros are involved. */
9237 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9238 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9239 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9241 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9242 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9243 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9244 bool arg0rz = false, arg0iz = false;
9245 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9246 || (arg0i && (arg0iz = real_zerop (arg0i))))
9248 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9249 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9250 if (arg0rz && arg1i && real_zerop (arg1i))
9252 tree rp = arg1r ? arg1r
9253 : build1 (REALPART_EXPR, rtype, arg1);
9254 tree ip = arg0i ? arg0i
9255 : build1 (IMAGPART_EXPR, rtype, arg0);
9256 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9258 else if (arg0iz && arg1r && real_zerop (arg1r))
9260 tree rp = arg0r ? arg0r
9261 : build1 (REALPART_EXPR, rtype, arg0);
9262 tree ip = arg1i ? arg1i
9263 : build1 (IMAGPART_EXPR, rtype, arg1);
9264 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9269 if (flag_unsafe_math_optimizations
9270 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9271 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9272 && (tem = distribute_real_division (code, type, arg0, arg1)))
9273 return tem;
9275 /* Convert x+x into x*2.0. */
9276 if (operand_equal_p (arg0, arg1, 0)
9277 && SCALAR_FLOAT_TYPE_P (type))
9278 return fold_build2 (MULT_EXPR, type, arg0,
9279 build_real (type, dconst2));
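	  /* E.g. for a double x, x + x becomes x * 2.0.  The result is exact
	     in binary floating point, so this needs no
	     -funsafe-math-optimizations guard.  */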
9281 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9282 if (flag_unsafe_math_optimizations
9283 && TREE_CODE (arg1) == PLUS_EXPR
9284 && TREE_CODE (arg0) != MULT_EXPR)
9286 tree tree10 = TREE_OPERAND (arg1, 0);
9287 tree tree11 = TREE_OPERAND (arg1, 1);
9288 if (TREE_CODE (tree11) == MULT_EXPR
9289 && TREE_CODE (tree10) == MULT_EXPR)
9291 tree tree0;
9292 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9293 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9296 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9297 if (flag_unsafe_math_optimizations
9298 && TREE_CODE (arg0) == PLUS_EXPR
9299 && TREE_CODE (arg1) != MULT_EXPR)
9301 tree tree00 = TREE_OPERAND (arg0, 0);
9302 tree tree01 = TREE_OPERAND (arg0, 1);
9303 if (TREE_CODE (tree01) == MULT_EXPR
9304 && TREE_CODE (tree00) == MULT_EXPR)
9306 tree tree0;
9307 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9308 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9313 bit_rotate:
9314 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
9315 is a left rotate of A by C1 bits. */
9316 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
9317 is a left rotate of A by B bits. */
9319 enum tree_code code0, code1;
9320 code0 = TREE_CODE (arg0);
9321 code1 = TREE_CODE (arg1);
9322 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9323 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9324 && operand_equal_p (TREE_OPERAND (arg0, 0),
9325 TREE_OPERAND (arg1, 0), 0)
9326 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9328 tree tree01, tree11;
9329 enum tree_code code01, code11;
9331 tree01 = TREE_OPERAND (arg0, 1);
9332 tree11 = TREE_OPERAND (arg1, 1);
9333 STRIP_NOPS (tree01);
9334 STRIP_NOPS (tree11);
9335 code01 = TREE_CODE (tree01);
9336 code11 = TREE_CODE (tree11);
9337 if (code01 == INTEGER_CST
9338 && code11 == INTEGER_CST
9339 && TREE_INT_CST_HIGH (tree01) == 0
9340 && TREE_INT_CST_HIGH (tree11) == 0
9341 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9342 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9343 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9344 code0 == LSHIFT_EXPR ? tree01 : tree11);
9345 else if (code11 == MINUS_EXPR)
9347 tree tree110, tree111;
9348 tree110 = TREE_OPERAND (tree11, 0);
9349 tree111 = TREE_OPERAND (tree11, 1);
9350 STRIP_NOPS (tree110);
9351 STRIP_NOPS (tree111);
9352 if (TREE_CODE (tree110) == INTEGER_CST
9353 && 0 == compare_tree_int (tree110,
9354 TYPE_PRECISION
9355 (TREE_TYPE (TREE_OPERAND
9356 (arg0, 0))))
9357 && operand_equal_p (tree01, tree111, 0))
9358 return build2 ((code0 == LSHIFT_EXPR
9359 ? LROTATE_EXPR
9360 : RROTATE_EXPR),
9361 type, TREE_OPERAND (arg0, 0), tree01);
9363 else if (code01 == MINUS_EXPR)
9365 tree tree010, tree011;
9366 tree010 = TREE_OPERAND (tree01, 0);
9367 tree011 = TREE_OPERAND (tree01, 1);
9368 STRIP_NOPS (tree010);
9369 STRIP_NOPS (tree011);
9370 if (TREE_CODE (tree010) == INTEGER_CST
9371 && 0 == compare_tree_int (tree010,
9372 TYPE_PRECISION
9373 (TREE_TYPE (TREE_OPERAND
9374 (arg0, 0))))
9375 && operand_equal_p (tree11, tree011, 0))
9376 return build2 ((code0 != LSHIFT_EXPR
9377 ? LROTATE_EXPR
9378 : RROTATE_EXPR),
9379 type, TREE_OPERAND (arg0, 0), tree11);
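	  /* For example, with a 32-bit unsigned x, (x << 3) + (x >> 29)
	     matches the constant pattern and becomes a rotate of x left by 3,
	     while (x << n) + (x >> (32 - n)) matches the MINUS_EXPR pattern
	     and becomes a rotate of x left by n.  */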
9384 associate:
9385 /* In most languages, we can't associate operations on floats through
9386 parentheses. Rather than remember where the parentheses were, we
9387 don't associate floats at all, unless the user has specified
9388 -funsafe-math-optimizations. */
9390 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9392 tree var0, con0, lit0, minus_lit0;
9393 tree var1, con1, lit1, minus_lit1;
9395 /* Split both trees into variables, constants, and literals. Then
9396 associate each group together, the constants with literals,
9397 then the result with variables. This increases the chances of
9398 literals being recombined later and of generating relocatable
9399 expressions for the sum of a constant and literal. */
9400 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9401 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9402 code == MINUS_EXPR);
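	  /* For example, (x + 1) + (y + 2) splits into the variables x and y
	     and the literals 1 and 2; associating the pieces back together
	     yields (x + y) + 3.  */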
9404 /* Only do something if we found more than two objects. Otherwise,
9405 nothing has changed and we risk infinite recursion. */
9406 if (2 < ((var0 != 0) + (var1 != 0)
9407 + (con0 != 0) + (con1 != 0)
9408 + (lit0 != 0) + (lit1 != 0)
9409 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9411 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9412 if (code == MINUS_EXPR)
9413 code = PLUS_EXPR;
9415 var0 = associate_trees (var0, var1, code, type);
9416 con0 = associate_trees (con0, con1, code, type);
9417 lit0 = associate_trees (lit0, lit1, code, type);
9418 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9420 /* Preserve the MINUS_EXPR if the negative part of the literal is
9421 greater than the positive part. Otherwise, the multiplicative
9422 folding code (i.e. extract_muldiv) may be fooled when
9423 unsigned constants are subtracted, as in the following
9424 example: ((X*2 + 4) - 8U)/2. */
9425 if (minus_lit0 && lit0)
9427 if (TREE_CODE (lit0) == INTEGER_CST
9428 && TREE_CODE (minus_lit0) == INTEGER_CST
9429 && tree_int_cst_lt (lit0, minus_lit0))
9431 minus_lit0 = associate_trees (minus_lit0, lit0,
9432 MINUS_EXPR, type);
9433 lit0 = 0;
9435 else
9437 lit0 = associate_trees (lit0, minus_lit0,
9438 MINUS_EXPR, type);
9439 minus_lit0 = 0;
9442 if (minus_lit0)
9444 if (con0 == 0)
9445 return fold_convert (type,
9446 associate_trees (var0, minus_lit0,
9447 MINUS_EXPR, type));
9448 else
9450 con0 = associate_trees (con0, minus_lit0,
9451 MINUS_EXPR, type);
9452 return fold_convert (type,
9453 associate_trees (var0, con0,
9454 PLUS_EXPR, type));
9458 con0 = associate_trees (con0, lit0, code, type);
9459 return fold_convert (type, associate_trees (var0, con0,
9460 code, type));
9464 return NULL_TREE;
9466 case MINUS_EXPR:
9467 /* A - (-B) -> A + B */
9468 if (TREE_CODE (arg1) == NEGATE_EXPR)
9469 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9470 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9471 if (TREE_CODE (arg0) == NEGATE_EXPR
9472 && (FLOAT_TYPE_P (type)
9473 || INTEGRAL_TYPE_P (type))
9474 && negate_expr_p (arg1)
9475 && reorder_operands_p (arg0, arg1))
9476 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9477 TREE_OPERAND (arg0, 0));
9478 /* Convert -A - 1 to ~A. */
9479 if (INTEGRAL_TYPE_P (type)
9480 && TREE_CODE (arg0) == NEGATE_EXPR
9481 && integer_onep (arg1)
9482 && !TYPE_OVERFLOW_TRAPS (type))
9483 return fold_build1 (BIT_NOT_EXPR, type,
9484 fold_convert (type, TREE_OPERAND (arg0, 0)));
9486 /* Convert -1 - A to ~A. */
9487 if (INTEGRAL_TYPE_P (type)
9488 && integer_all_onesp (arg0))
9489 return fold_build1 (BIT_NOT_EXPR, type, op1);
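      /* In two's complement arithmetic -1 - a == ~a, so the subtraction
	 disappears entirely.  */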
9491 if (! FLOAT_TYPE_P (type))
9493 if (integer_zerop (arg0))
9494 return negate_expr (fold_convert (type, arg1));
9495 if (integer_zerop (arg1))
9496 return non_lvalue (fold_convert (type, arg0));
9498 /* Fold A - (A & B) into ~B & A. */
9499 if (!TREE_SIDE_EFFECTS (arg0)
9500 && TREE_CODE (arg1) == BIT_AND_EXPR)
9502 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9503 return fold_build2 (BIT_AND_EXPR, type,
9504 fold_build1 (BIT_NOT_EXPR, type,
9505 TREE_OPERAND (arg1, 0)),
9506 arg0);
9507 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9508 return fold_build2 (BIT_AND_EXPR, type,
9509 fold_build1 (BIT_NOT_EXPR, type,
9510 TREE_OPERAND (arg1, 1)),
9511 arg0);
9514 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9515 any power of 2 minus 1. */
9516 if (TREE_CODE (arg0) == BIT_AND_EXPR
9517 && TREE_CODE (arg1) == BIT_AND_EXPR
9518 && operand_equal_p (TREE_OPERAND (arg0, 0),
9519 TREE_OPERAND (arg1, 0), 0))
9521 tree mask0 = TREE_OPERAND (arg0, 1);
9522 tree mask1 = TREE_OPERAND (arg1, 1);
9523 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9525 if (operand_equal_p (tem, mask1, 0))
9527 tem = fold_build2 (BIT_XOR_EXPR, type,
9528 TREE_OPERAND (arg0, 0), mask1);
9529 return fold_build2 (MINUS_EXPR, type, tem, mask1);
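	  /* Checking with B == 7 and A == 13: (A & ~B) - (A & B) is
	     8 - 5 == 3, and (A ^ B) - B is 10 - 7 == 3, so the two forms
	     agree.  */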
9534 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9535 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9536 return non_lvalue (fold_convert (type, arg0));
9538 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9539 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9540 (-ARG1 + ARG0) reduces to -ARG1. */
9541 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9542 return negate_expr (fold_convert (type, arg1));
9544 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9545 __complex__ ( x, -y ). This is not the same for SNaNs or if
9546 signed zeros are involved. */
9547 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9548 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9549 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9551 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9552 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9553 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9554 bool arg0rz = false, arg0iz = false;
9555 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9556 || (arg0i && (arg0iz = real_zerop (arg0i))))
9558 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9559 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9560 if (arg0rz && arg1i && real_zerop (arg1i))
9562 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9563 arg1r ? arg1r
9564 : build1 (REALPART_EXPR, rtype, arg1));
9565 tree ip = arg0i ? arg0i
9566 : build1 (IMAGPART_EXPR, rtype, arg0);
9567 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9569 else if (arg0iz && arg1r && real_zerop (arg1r))
9571 tree rp = arg0r ? arg0r
9572 : build1 (REALPART_EXPR, rtype, arg0);
9573 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9574 arg1i ? arg1i
9575 : build1 (IMAGPART_EXPR, rtype, arg1));
9576 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9581 /* Fold &x - &x. This can happen from &x.foo - &x.
9582 This is unsafe for certain floats even in non-IEEE formats.
9583 In IEEE, it is unsafe because it does wrong for NaNs.
9584 Also note that operand_equal_p is always false if an operand
9585 is volatile. */
9587 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9588 && operand_equal_p (arg0, arg1, 0))
9589 return fold_convert (type, integer_zero_node);
9591 /* A - B -> A + (-B) if B is easily negatable. */
9592 if (negate_expr_p (arg1)
9593 && ((FLOAT_TYPE_P (type)
9594 /* Avoid this transformation if B is a positive REAL_CST. */
9595 && (TREE_CODE (arg1) != REAL_CST
9596 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9597 || INTEGRAL_TYPE_P (type)))
9598 return fold_build2 (PLUS_EXPR, type,
9599 fold_convert (type, arg0),
9600 fold_convert (type, negate_expr (arg1)));
9602 /* Try folding difference of addresses. */
9604 HOST_WIDE_INT diff;
9606 if ((TREE_CODE (arg0) == ADDR_EXPR
9607 || TREE_CODE (arg1) == ADDR_EXPR)
9608 && ptr_difference_const (arg0, arg1, &diff))
9609 return build_int_cst_type (type, diff);
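      /* For example, given char buf[16], the difference &buf[10] - &buf[2]
	 is a compile-time constant and folds to 8.  */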
9612 /* Fold &a[i] - &a[j] to (i-j) scaled by the element size. */
9613 if (TREE_CODE (arg0) == ADDR_EXPR
9614 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9615 && TREE_CODE (arg1) == ADDR_EXPR
9616 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9618 tree aref0 = TREE_OPERAND (arg0, 0);
9619 tree aref1 = TREE_OPERAND (arg1, 0);
9620 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9621 TREE_OPERAND (aref1, 0), 0))
9623 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9624 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9625 tree esz = array_ref_element_size (aref0);
9626 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9627 return fold_build2 (MULT_EXPR, type, diff,
9628 fold_convert (type, esz));
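	  /* For example, with 4-byte ints, &a[i] - &a[j] is the byte
	     difference (i - j) * 4; the scale factor comes from
	     array_ref_element_size.  */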
9633 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9634 of the array. The loop optimizer sometimes produces this type of
9635 expression. */
9636 if (TREE_CODE (arg0) == ADDR_EXPR)
9638 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9639 if (tem)
9640 return fold_convert (type, tem);
9643 if (flag_unsafe_math_optimizations
9644 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9645 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9646 && (tem = distribute_real_division (code, type, arg0, arg1)))
9647 return tem;
9649 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9650 same or one. */
9651 if ((TREE_CODE (arg0) == MULT_EXPR
9652 || TREE_CODE (arg1) == MULT_EXPR)
9653 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9655 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9656 if (tem)
9657 return tem;
9660 goto associate;
9662 case MULT_EXPR:
9663 /* (-A) * (-B) -> A * B */
9664 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9665 return fold_build2 (MULT_EXPR, type,
9666 fold_convert (type, TREE_OPERAND (arg0, 0)),
9667 fold_convert (type, negate_expr (arg1)));
9668 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9669 return fold_build2 (MULT_EXPR, type,
9670 fold_convert (type, negate_expr (arg0)),
9671 fold_convert (type, TREE_OPERAND (arg1, 0)));
9673 if (! FLOAT_TYPE_P (type))
9675 if (integer_zerop (arg1))
9676 return omit_one_operand (type, arg1, arg0);
9677 if (integer_onep (arg1))
9678 return non_lvalue (fold_convert (type, arg0));
9679 /* Transform x * -1 into -x. */
9680 if (integer_all_onesp (arg1))
9681 return fold_convert (type, negate_expr (arg0));
9682 /* Transform x * -C into -x * C if x is easily negatable. */
9683 if (TREE_CODE (arg1) == INTEGER_CST
9684 && tree_int_cst_sgn (arg1) == -1
9685 && negate_expr_p (arg0)
9686 && (tem = negate_expr (arg1)) != arg1
9687 && !TREE_OVERFLOW (tem))
9688 return fold_build2 (MULT_EXPR, type,
9689 negate_expr (arg0), tem);
9691 /* (a * (1 << b)) is (a << b) */
9692 if (TREE_CODE (arg1) == LSHIFT_EXPR
9693 && integer_onep (TREE_OPERAND (arg1, 0)))
9694 return fold_build2 (LSHIFT_EXPR, type, arg0,
9695 TREE_OPERAND (arg1, 1));
9696 if (TREE_CODE (arg0) == LSHIFT_EXPR
9697 && integer_onep (TREE_OPERAND (arg0, 0)))
9698 return fold_build2 (LSHIFT_EXPR, type, arg1,
9699 TREE_OPERAND (arg0, 1));
9701 strict_overflow_p = false;
9702 if (TREE_CODE (arg1) == INTEGER_CST
9703 && 0 != (tem = extract_muldiv (op0,
9704 fold_convert (type, arg1),
9705 code, NULL_TREE,
9706 &strict_overflow_p)))
9708 if (strict_overflow_p)
9709 fold_overflow_warning (("assuming signed overflow does not "
9710 "occur when simplifying "
9711 "multiplication"),
9712 WARN_STRICT_OVERFLOW_MISC);
9713 return fold_convert (type, tem);
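	  /* For example, with unsigned x, extract_muldiv can rewrite
	     (x + 2) * 4 as x * 4 + 8; doing the same for signed x assumes
	     that signed overflow does not occur, hence the warning above.  */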
9716 /* Optimize z * conj(z) for integer complex numbers. */
9717 if (TREE_CODE (arg0) == CONJ_EXPR
9718 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9719 return fold_mult_zconjz (type, arg1);
9720 if (TREE_CODE (arg1) == CONJ_EXPR
9721 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9722 return fold_mult_zconjz (type, arg0);
9724 else
9726 /* Maybe fold x * 0 to 0. The expressions aren't the same
9727 when x is NaN, since x * 0 is also NaN. Nor are they the
9728 same in modes with signed zeros, since multiplying a
9729 negative value by 0 gives -0, not +0. */
9730 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9731 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9732 && real_zerop (arg1))
9733 return omit_one_operand (type, arg1, arg0);
9734 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9735 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9736 && real_onep (arg1))
9737 return non_lvalue (fold_convert (type, arg0));
9739 /* Transform x * -1.0 into -x. */
9740 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9741 && real_minus_onep (arg1))
9742 return fold_convert (type, negate_expr (arg0));
9744 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9745 if (flag_unsafe_math_optimizations
9746 && TREE_CODE (arg0) == RDIV_EXPR
9747 && TREE_CODE (arg1) == REAL_CST
9748 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9750 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9751 arg1, 0);
9752 if (tem)
9753 return fold_build2 (RDIV_EXPR, type, tem,
9754 TREE_OPERAND (arg0, 1));
9757 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9758 if (operand_equal_p (arg0, arg1, 0))
9760 tree tem = fold_strip_sign_ops (arg0);
9761 if (tem != NULL_TREE)
9763 tem = fold_convert (type, tem);
9764 return fold_build2 (MULT_EXPR, type, tem, tem);
9768 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9769 This is not the same for NaNs or if signed zeros are
9770 involved. */
9771 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9772 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9773 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9774 && TREE_CODE (arg1) == COMPLEX_CST
9775 && real_zerop (TREE_REALPART (arg1)))
9777 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9778 if (real_onep (TREE_IMAGPART (arg1)))
9779 return fold_build2 (COMPLEX_EXPR, type,
9780 negate_expr (fold_build1 (IMAGPART_EXPR,
9781 rtype, arg0)),
9782 fold_build1 (REALPART_EXPR, rtype, arg0));
9783 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9784 return fold_build2 (COMPLEX_EXPR, type,
9785 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9786 negate_expr (fold_build1 (REALPART_EXPR,
9787 rtype, arg0)));
9790 /* Optimize z * conj(z) for floating point complex numbers.
9791 Guarded by flag_unsafe_math_optimizations as non-finite
9792 imaginary components don't produce scalar results. */
9793 if (flag_unsafe_math_optimizations
9794 && TREE_CODE (arg0) == CONJ_EXPR
9795 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9796 return fold_mult_zconjz (type, arg1);
9797 if (flag_unsafe_math_optimizations
9798 && TREE_CODE (arg1) == CONJ_EXPR
9799 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9800 return fold_mult_zconjz (type, arg0);
9802 if (flag_unsafe_math_optimizations)
9804 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9805 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9807 /* Optimizations of root(...)*root(...). */
9808 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9810 tree rootfn, arg, arglist;
9811 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9812 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9814 /* Optimize sqrt(x)*sqrt(x) as x. */
9815 if (BUILTIN_SQRT_P (fcode0)
9816 && operand_equal_p (arg00, arg10, 0)
9817 && ! HONOR_SNANS (TYPE_MODE (type)))
9818 return arg00;
9820 /* Optimize root(x)*root(y) as root(x*y). */
9821 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9822 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9823 arglist = build_tree_list (NULL_TREE, arg);
9824 return build_function_call_expr (rootfn, arglist);
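	      /* E.g. sqrt(x) * sqrt(x) folds to plain x, and
		 sqrt(x) * sqrt(y) becomes sqrt(x * y); both can change
		 results for negative or non-finite inputs, which is why
		 -funsafe-math-optimizations is required.  */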
9827 /* Optimize expN(x)*expN(y) as expN(x+y). */
9828 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9830 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9831 tree arg = fold_build2 (PLUS_EXPR, type,
9832 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9833 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9834 tree arglist = build_tree_list (NULL_TREE, arg);
9835 return build_function_call_expr (expfn, arglist);
9838 /* Optimizations of pow(...)*pow(...). */
9839 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9840 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9841 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9843 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9844 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9845 1)));
9846 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9847 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9848 1)));
9850 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9851 if (operand_equal_p (arg01, arg11, 0))
9853 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9854 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9855 tree arglist = tree_cons (NULL_TREE, arg,
9856 build_tree_list (NULL_TREE,
9857 arg01));
9858 return build_function_call_expr (powfn, arglist);
9861 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9862 if (operand_equal_p (arg00, arg10, 0))
9864 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9865 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9866 tree arglist = tree_cons (NULL_TREE, arg00,
9867 build_tree_list (NULL_TREE,
9868 arg));
9869 return build_function_call_expr (powfn, arglist);
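	      /* E.g. pow(x, 2.0) * pow(y, 2.0) becomes pow(x * y, 2.0), and
		 pow(x, 2.0) * pow(x, 3.0) becomes pow(x, 5.0).  */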
9873 /* Optimize tan(x)*cos(x) as sin(x). */
9874 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9875 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9876 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9877 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9878 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9879 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9880 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9881 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9883 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9885 if (sinfn != NULL_TREE)
9886 return build_function_call_expr (sinfn,
9887 TREE_OPERAND (arg0, 1));
9890 /* Optimize x*pow(x,c) as pow(x,c+1). */
9891 if (fcode1 == BUILT_IN_POW
9892 || fcode1 == BUILT_IN_POWF
9893 || fcode1 == BUILT_IN_POWL)
9895 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9896 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9897 1)));
9898 if (TREE_CODE (arg11) == REAL_CST
9899 && !TREE_OVERFLOW (arg11)
9900 && operand_equal_p (arg0, arg10, 0))
9902 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9903 REAL_VALUE_TYPE c;
9904 tree arg, arglist;
9906 c = TREE_REAL_CST (arg11);
9907 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9908 arg = build_real (type, c);
9909 arglist = build_tree_list (NULL_TREE, arg);
9910 arglist = tree_cons (NULL_TREE, arg0, arglist);
9911 return build_function_call_expr (powfn, arglist);
9915 /* Optimize pow(x,c)*x as pow(x,c+1). */
9916 if (fcode0 == BUILT_IN_POW
9917 || fcode0 == BUILT_IN_POWF
9918 || fcode0 == BUILT_IN_POWL)
9920 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9921 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9922 1)));
9923 if (TREE_CODE (arg01) == REAL_CST
9924 && !TREE_OVERFLOW (arg01)
9925 && operand_equal_p (arg1, arg00, 0))
9927 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9928 REAL_VALUE_TYPE c;
9929 tree arg, arglist;
9931 c = TREE_REAL_CST (arg01);
9932 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9933 arg = build_real (type, c);
9934 arglist = build_tree_list (NULL_TREE, arg);
9935 arglist = tree_cons (NULL_TREE, arg1, arglist);
9936 return build_function_call_expr (powfn, arglist);
9940 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9941 if (! optimize_size
9942 && operand_equal_p (arg0, arg1, 0))
9944 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9946 if (powfn)
9948 tree arg = build_real (type, dconst2);
9949 tree arglist = build_tree_list (NULL_TREE, arg);
9950 arglist = tree_cons (NULL_TREE, arg0, arglist);
9951 return build_function_call_expr (powfn, arglist);
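	      /* Canonicalizing x * x as pow(x, 2.0) costs nothing at
		 expansion time and lets the pow rules above combine further
		 products, so e.g. (x * x) * x can eventually fold to
		 pow(x, 3.0).  */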
9956 goto associate;
9958 case BIT_IOR_EXPR:
9959 bit_ior:
9960 if (integer_all_onesp (arg1))
9961 return omit_one_operand (type, arg1, arg0);
9962 if (integer_zerop (arg1))
9963 return non_lvalue (fold_convert (type, arg0));
9964 if (operand_equal_p (arg0, arg1, 0))
9965 return non_lvalue (fold_convert (type, arg0));
9967 /* ~X | X is -1. */
9968 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9969 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9971 t1 = build_int_cst_type (type, -1);
9972 return omit_one_operand (type, t1, arg1);
9975 /* X | ~X is -1. */
9976 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9977 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9979 t1 = build_int_cst_type (type, -1);
9980 return omit_one_operand (type, t1, arg0);
9983 /* Canonicalize (X & C1) | C2. */
9984 if (TREE_CODE (arg0) == BIT_AND_EXPR
9985 && TREE_CODE (arg1) == INTEGER_CST
9986 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9988 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9989 int width = TYPE_PRECISION (type);
9990 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9991 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9992 hi2 = TREE_INT_CST_HIGH (arg1);
9993 lo2 = TREE_INT_CST_LOW (arg1);
9995 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9996 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9997 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9999 if (width > HOST_BITS_PER_WIDE_INT)
10001 mhi = (unsigned HOST_WIDE_INT) -1
10002 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10003 mlo = -1;
10005 else
10007 mhi = 0;
10008 mlo = (unsigned HOST_WIDE_INT) -1
10009 >> (HOST_BITS_PER_WIDE_INT - width);
10012 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10013 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10014 return fold_build2 (BIT_IOR_EXPR, type,
10015 TREE_OPERAND (arg0, 0), arg1);
10017 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10018 hi1 &= mhi;
10019 lo1 &= mlo;
10020 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10021 return fold_build2 (BIT_IOR_EXPR, type,
10022 fold_build2 (BIT_AND_EXPR, type,
10023 TREE_OPERAND (arg0, 0),
10024 build_int_cst_wide (type,
10025 lo1 & ~lo2,
10026 hi1 & ~hi2)),
10027 arg1);
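	  /* For example, in (x & 0xFF) | 0x0F the bits of 0x0F are forced on
	     by the IOR, so C1 shrinks from 0xFF to 0xF0 and the expression
	     is canonicalized to (x & 0xF0) | 0x0F.  */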
10030 /* (X & Y) | Y is (X, Y). */
10031 if (TREE_CODE (arg0) == BIT_AND_EXPR
10032 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10033 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10034 /* (X & Y) | X is (Y, X). */
10035 if (TREE_CODE (arg0) == BIT_AND_EXPR
10036 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10037 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10038 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10039 /* X | (X & Y) is (Y, X). */
10040 if (TREE_CODE (arg1) == BIT_AND_EXPR
10041 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10042 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10043 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10044 /* X | (Y & X) is (Y, X). */
10045 if (TREE_CODE (arg1) == BIT_AND_EXPR
10046 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10047 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10048 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10050 t1 = distribute_bit_expr (code, type, arg0, arg1);
10051 if (t1 != NULL_TREE)
10052 return t1;
10054 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10056 This results in more efficient code for machines without a NAND
10057 instruction. Combine will canonicalize to the first form
10058 which will allow use of NAND instructions provided by the
10059 backend if they exist. */
10060 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10061 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10063 return fold_build1 (BIT_NOT_EXPR, type,
10064 build2 (BIT_AND_EXPR, type,
10065 TREE_OPERAND (arg0, 0),
10066 TREE_OPERAND (arg1, 0)));
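	  /* E.g. ~a | ~b becomes ~(a & b): De Morgan's law in the direction
	     that exposes a single NAND.  */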
10069 /* See if this can be simplified into a rotate first. If that
10070 is unsuccessful, continue in the association code. */
10071 goto bit_rotate;
10073 case BIT_XOR_EXPR:
10074 if (integer_zerop (arg1))
10075 return non_lvalue (fold_convert (type, arg0));
10076 if (integer_all_onesp (arg1))
10077 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10078 if (operand_equal_p (arg0, arg1, 0))
10079 return omit_one_operand (type, integer_zero_node, arg0);
10081 /* ~X ^ X is -1. */
10082 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10083 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10085 t1 = build_int_cst_type (type, -1);
10086 return omit_one_operand (type, t1, arg1);
10089 /* X ^ ~X is -1. */
10090 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10091 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10093 t1 = build_int_cst_type (type, -1);
10094 return omit_one_operand (type, t1, arg0);
10097 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10098 with a constant, and the two constants have no bits in common,
10099 we should treat this as a BIT_IOR_EXPR since this may produce more
10100 simplifications. */
10101 if (TREE_CODE (arg0) == BIT_AND_EXPR
10102 && TREE_CODE (arg1) == BIT_AND_EXPR
10103 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10104 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10105 && integer_zerop (const_binop (BIT_AND_EXPR,
10106 TREE_OPERAND (arg0, 1),
10107 TREE_OPERAND (arg1, 1), 0)))
10109 code = BIT_IOR_EXPR;
10110 goto bit_ior;
10113 /* (X | Y) ^ X -> Y & ~X. */
10114 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10115 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10117 tree t2 = TREE_OPERAND (arg0, 1);
10118 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10119 arg1);
10120 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10121 fold_convert (type, t1));
10122 return t1;
10125 /* (Y | X) ^ X -> Y & ~X. */
10126 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10127 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10129 tree t2 = TREE_OPERAND (arg0, 0);
10130 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10131 arg1);
10132 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10133 fold_convert (type, t1));
10134 return t1;
10137 /* X ^ (X | Y) -> Y & ~X. */
10138 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10139 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10141 tree t2 = TREE_OPERAND (arg1, 1);
10142 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10143 arg0);
10144 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10145 fold_convert (type, t1));
10146 return t1;
10149 /* X ^ (Y | X) -> Y & ~X. */
10150 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10151 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10153 tree t2 = TREE_OPERAND (arg1, 0);
10154 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10155 arg0);
10156 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10157 fold_convert (type, t1));
10158 return t1;
10161 /* Convert ~X ^ ~Y to X ^ Y. */
10162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10163 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10164 return fold_build2 (code, type,
10165 fold_convert (type, TREE_OPERAND (arg0, 0)),
10166 fold_convert (type, TREE_OPERAND (arg1, 0)));
10168 /* Convert ~X ^ C to X ^ ~C. */
10169 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10170 && TREE_CODE (arg1) == INTEGER_CST)
10171 return fold_build2 (code, type,
10172 fold_convert (type, TREE_OPERAND (arg0, 0)),
10173 fold_build1 (BIT_NOT_EXPR, type, arg1));
10175 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10176 if (TREE_CODE (arg0) == BIT_AND_EXPR
10177 && integer_onep (TREE_OPERAND (arg0, 1))
10178 && integer_onep (arg1))
10179 return fold_build2 (EQ_EXPR, type, arg0,
10180 build_int_cst (TREE_TYPE (arg0), 0));
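      /* (x & 1) ^ 1 inverts the low bit viewed as a boolean, which is
	 exactly the test (x & 1) == 0.  */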
10182 /* Fold (X & Y) ^ Y as ~X & Y. */
10183 if (TREE_CODE (arg0) == BIT_AND_EXPR
10184 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10186 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10187 return fold_build2 (BIT_AND_EXPR, type,
10188 fold_build1 (BIT_NOT_EXPR, type, tem),
10189 fold_convert (type, arg1));
10191 /* Fold (X & Y) ^ X as ~Y & X. */
10192 if (TREE_CODE (arg0) == BIT_AND_EXPR
10193 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10194 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10196 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10197 return fold_build2 (BIT_AND_EXPR, type,
10198 fold_build1 (BIT_NOT_EXPR, type, tem),
10199 fold_convert (type, arg1));
10201 /* Fold X ^ (X & Y) as X & ~Y. */
10202 if (TREE_CODE (arg1) == BIT_AND_EXPR
10203 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10205 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10206 return fold_build2 (BIT_AND_EXPR, type,
10207 fold_convert (type, arg0),
10208 fold_build1 (BIT_NOT_EXPR, type, tem));
10210 /* Fold X ^ (Y & X) as ~Y & X. */
10211 if (TREE_CODE (arg1) == BIT_AND_EXPR
10212 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10213 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10215 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10216 return fold_build2 (BIT_AND_EXPR, type,
10217 fold_build1 (BIT_NOT_EXPR, type, tem),
10218 fold_convert (type, arg0));
10221 /* See if this can be simplified into a rotate first. If that
10222 is unsuccessful, continue in the association code. */
10223 goto bit_rotate;
10225 case BIT_AND_EXPR:
10226 if (integer_all_onesp (arg1))
10227 return non_lvalue (fold_convert (type, arg0));
10228 if (integer_zerop (arg1))
10229 return omit_one_operand (type, arg1, arg0);
10230 if (operand_equal_p (arg0, arg1, 0))
10231 return non_lvalue (fold_convert (type, arg0));
10233 /* ~X & X is always zero. */
10234 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10235 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10236 return omit_one_operand (type, integer_zero_node, arg1);
10238 /* X & ~X is always zero. */
10239 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10240 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10241 return omit_one_operand (type, integer_zero_node, arg0);
10243 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10244 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10245 && TREE_CODE (arg1) == INTEGER_CST
10246 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10247 return fold_build2 (BIT_IOR_EXPR, type,
10248 fold_build2 (BIT_AND_EXPR, type,
10249 TREE_OPERAND (arg0, 0), arg1),
10250 fold_build2 (BIT_AND_EXPR, type,
10251 TREE_OPERAND (arg0, 1), arg1));
10253 /* (X | Y) & Y is (X, Y). */
10254 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10255 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10256 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10257 /* (X | Y) & X is (Y, X). */
10258 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10259 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10260 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10261 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10262 /* X & (X | Y) is (Y, X). */
10263 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10264 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10265 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10266 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10267 /* X & (Y | X) is (Y, X). */
10268 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10269 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10270 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10271 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10273 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10274 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10275 && integer_onep (TREE_OPERAND (arg0, 1))
10276 && integer_onep (arg1))
10278 tem = TREE_OPERAND (arg0, 0);
10279 return fold_build2 (EQ_EXPR, type,
10280 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10281 build_int_cst (TREE_TYPE (tem), 1)),
10282 build_int_cst (TREE_TYPE (tem), 0));
10284 /* Fold ~X & 1 as (X & 1) == 0. */
10285 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10286 && integer_onep (arg1))
10288 tem = TREE_OPERAND (arg0, 0);
10289 return fold_build2 (EQ_EXPR, type,
10290 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10291 build_int_cst (TREE_TYPE (tem), 1)),
10292 build_int_cst (TREE_TYPE (tem), 0));
10295 /* Fold (X ^ Y) & Y as ~X & Y. */
10296 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10297 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10299 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10300 return fold_build2 (BIT_AND_EXPR, type,
10301 fold_build1 (BIT_NOT_EXPR, type, tem),
10302 fold_convert (type, arg1));
10304 /* Fold (X ^ Y) & X as ~Y & X. */
10305 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10306 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10307 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10309 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10310 return fold_build2 (BIT_AND_EXPR, type,
10311 fold_build1 (BIT_NOT_EXPR, type, tem),
10312 fold_convert (type, arg1));
10314 /* Fold X & (X ^ Y) as X & ~Y. */
10315 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10316 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10318 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10319 return fold_build2 (BIT_AND_EXPR, type,
10320 fold_convert (type, arg0),
10321 fold_build1 (BIT_NOT_EXPR, type, tem));
10323 /* Fold X & (Y ^ X) as ~Y & X. */
10324 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10325 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10326 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10328 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10329 return fold_build2 (BIT_AND_EXPR, type,
10330 fold_build1 (BIT_NOT_EXPR, type, tem),
10331 fold_convert (type, arg0));
10334 t1 = distribute_bit_expr (code, type, arg0, arg1);
10335 if (t1 != NULL_TREE)
10336 return t1;
10337 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10338 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10339 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10341 unsigned int prec
10342 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10344 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10345 && (~TREE_INT_CST_LOW (arg1)
10346 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10347 return fold_convert (type, TREE_OPERAND (arg0, 0));
10350 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10352 This results in more efficient code for machines without a NOR
10353 instruction. Combine will canonicalize to the first form
10354 which will allow use of NOR instructions provided by the
10355 backend if they exist. */
10356 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10357 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10359 return fold_build1 (BIT_NOT_EXPR, type,
10360 build2 (BIT_IOR_EXPR, type,
10361 TREE_OPERAND (arg0, 0),
10362 TREE_OPERAND (arg1, 0)));
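	  /* E.g. ~a & ~b becomes ~(a | b), the De Morgan dual of the
	     BIT_IOR_EXPR case above.  */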
10365 goto associate;
10367 case RDIV_EXPR:
10368 /* Don't touch a floating-point divide by zero unless the mode
10369 of the constant can represent infinity. */
10370 if (TREE_CODE (arg1) == REAL_CST
10371 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10372 && real_zerop (arg1))
10373 return NULL_TREE;
10375 /* Optimize A / A to 1.0 if we don't care about
10376 NaNs or Infinities. Skip the transformation
10377 for non-real operands. */
10378 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10379 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10380 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10381 && operand_equal_p (arg0, arg1, 0))
10383 tree r = build_real (TREE_TYPE (arg0), dconst1);
10385 return omit_two_operands (type, r, arg0, arg1);
10388 /* The complex version of the above A / A optimization. */
10389 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10390 && operand_equal_p (arg0, arg1, 0))
10392 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10393 if (! HONOR_NANS (TYPE_MODE (elem_type))
10394 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10396 tree r = build_real (elem_type, dconst1);
10397 /* omit_two_operands will call fold_convert for us. */
10398 return omit_two_operands (type, r, arg0, arg1);
10402 /* (-A) / (-B) -> A / B */
10403 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10404 return fold_build2 (RDIV_EXPR, type,
10405 TREE_OPERAND (arg0, 0),
10406 negate_expr (arg1));
10407 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10408 return fold_build2 (RDIV_EXPR, type,
10409 negate_expr (arg0),
10410 TREE_OPERAND (arg1, 0));
10412 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10413 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10414 && real_onep (arg1))
10415 return non_lvalue (fold_convert (type, arg0));
10417 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10418 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10419 && real_minus_onep (arg1))
10420 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10422 /* If ARG1 is a constant, we can convert this to a multiply by the
10423 reciprocal. This does not have the same rounding properties,
10424 so only do this if -funsafe-math-optimizations. We can actually
10425 always safely do it if ARG1 is a power of two, but it's hard to
10426 tell if it is or not in a portable manner. */
10427 if (TREE_CODE (arg1) == REAL_CST)
10429 if (flag_unsafe_math_optimizations
10430 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10431 arg1, 0)))
10432 return fold_build2 (MULT_EXPR, type, arg0, tem);
10433 /* Find the reciprocal if optimizing and the result is exact. */
10434 if (optimize)
10436 REAL_VALUE_TYPE r;
10437 r = TREE_REAL_CST (arg1);
10438 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10440 tem = build_real (type, r);
10441 return fold_build2 (MULT_EXPR, type,
10442 fold_convert (type, arg0), tem);
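	      /* For example, x / 4.0 becomes x * 0.25 whenever we optimize,
		 since 0.25 is the exact reciprocal of 4.0, whereas x / 3.0
		 becomes x * (1.0/3.0) only under
		 -funsafe-math-optimizations because that reciprocal is
		 inexact.  */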
10446 /* Convert A/B/C to A/(B*C). */
10447 if (flag_unsafe_math_optimizations
10448 && TREE_CODE (arg0) == RDIV_EXPR)
10449 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10450 fold_build2 (MULT_EXPR, type,
10451 TREE_OPERAND (arg0, 1), arg1));
10453 /* Convert A/(B/C) to (A/B)*C. */
10454 if (flag_unsafe_math_optimizations
10455 && TREE_CODE (arg1) == RDIV_EXPR)
10456 return fold_build2 (MULT_EXPR, type,
10457 fold_build2 (RDIV_EXPR, type, arg0,
10458 TREE_OPERAND (arg1, 0)),
10459 TREE_OPERAND (arg1, 1));
10461 /* Convert C1/(X*C2) into (C1/C2)/X. */
10462 if (flag_unsafe_math_optimizations
10463 && TREE_CODE (arg1) == MULT_EXPR
10464 && TREE_CODE (arg0) == REAL_CST
10465 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10467 tree tem = const_binop (RDIV_EXPR, arg0,
10468 TREE_OPERAND (arg1, 1), 0);
10469 if (tem)
10470 return fold_build2 (RDIV_EXPR, type, tem,
10471 TREE_OPERAND (arg1, 0));
10474 if (flag_unsafe_math_optimizations)
10476 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10477 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10479 /* Optimize sin(x)/cos(x) as tan(x). */
10480 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10481 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10482 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10483 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10484 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10486 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10488 if (tanfn != NULL_TREE)
10489 return build_function_call_expr (tanfn,
10490 TREE_OPERAND (arg0, 1));
10493 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10494 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10495 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10496 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10497 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10498 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10500 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10502 if (tanfn != NULL_TREE)
10504 tree tmp = TREE_OPERAND (arg0, 1);
10505 tmp = build_function_call_expr (tanfn, tmp);
10506 return fold_build2 (RDIV_EXPR, type,
10507 build_real (type, dconst1), tmp);
10511 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10512 NaNs or Infinities. */
10513 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10514 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10515 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10517 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10518 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10520 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10521 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10522 && operand_equal_p (arg00, arg01, 0))
10524 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10526 if (cosfn != NULL_TREE)
10527 return build_function_call_expr (cosfn,
10528 TREE_OPERAND (arg0, 1));
10532 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10533 NaNs or Infinities. */
10534 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10535 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10536 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10538 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10539 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10541 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10542 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10543 && operand_equal_p (arg00, arg01, 0))
10545 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10547 if (cosfn != NULL_TREE)
10549 tree tmp = TREE_OPERAND (arg0, 1);
10550 tmp = build_function_call_expr (cosfn, tmp);
10551 return fold_build2 (RDIV_EXPR, type,
10552 build_real (type, dconst1),
10553 tmp);
10558 /* Optimize pow(x,c)/x as pow(x,c-1). */
10559 if (fcode0 == BUILT_IN_POW
10560 || fcode0 == BUILT_IN_POWF
10561 || fcode0 == BUILT_IN_POWL)
10563 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10564 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10565 if (TREE_CODE (arg01) == REAL_CST
10566 && !TREE_OVERFLOW (arg01)
10567 && operand_equal_p (arg1, arg00, 0))
10569 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10570 REAL_VALUE_TYPE c;
10571 tree arg, arglist;
10573 c = TREE_REAL_CST (arg01);
10574 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10575 arg = build_real (type, c);
10576 arglist = build_tree_list (NULL_TREE, arg);
10577 arglist = tree_cons (NULL_TREE, arg1, arglist);
10578 return build_function_call_expr (powfn, arglist);
10582 /* Optimize x/expN(y) into x*expN(-y). */
10583 if (BUILTIN_EXPONENT_P (fcode1))
10585 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10586 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10587 tree arglist = build_tree_list (NULL_TREE,
10588 fold_convert (type, arg));
10589 arg1 = build_function_call_expr (expfn, arglist);
10590 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10593 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10594 if (fcode1 == BUILT_IN_POW
10595 || fcode1 == BUILT_IN_POWF
10596 || fcode1 == BUILT_IN_POWL)
10598 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10599 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10600 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10601 tree neg11 = fold_convert (type, negate_expr (arg11));
10602 tree arglist = tree_cons (NULL_TREE, arg10,
10603 build_tree_list (NULL_TREE, neg11));
10604 arg1 = build_function_call_expr (powfn, arglist);
10605 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10608 return NULL_TREE;
10610 case TRUNC_DIV_EXPR:
10611 case FLOOR_DIV_EXPR:
10612 /* Simplify A / (B << N) where A and B are positive and B is
10613 a power of 2, to A >> (N + log2(B)). */
10614 strict_overflow_p = false;
10615 if (TREE_CODE (arg1) == LSHIFT_EXPR
10616 && (TYPE_UNSIGNED (type)
10617 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10619 tree sval = TREE_OPERAND (arg1, 0);
10620 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10622 tree sh_cnt = TREE_OPERAND (arg1, 1);
10623 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10625 if (strict_overflow_p)
10626 fold_overflow_warning (("assuming signed overflow does not "
10627 "occur when simplifying A / (B << N)"),
10628 WARN_STRICT_OVERFLOW_MISC);
10630 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10631 sh_cnt, build_int_cst (NULL_TREE, pow2));
10632 return fold_build2 (RSHIFT_EXPR, type,
10633 fold_convert (type, arg0), sh_cnt);
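	      /* For example, with unsigned a, a / (4 << n) becomes
		 a >> (n + 2), since log2(4) == 2.  */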
10636 /* Fall through. */
10638 case ROUND_DIV_EXPR:
10639 case CEIL_DIV_EXPR:
10640 case EXACT_DIV_EXPR:
10641 if (integer_onep (arg1))
10642 return non_lvalue (fold_convert (type, arg0));
10643 if (integer_zerop (arg1))
10644 return NULL_TREE;
10645 /* X / -1 is -X. */
10646 if (!TYPE_UNSIGNED (type)
10647 && TREE_CODE (arg1) == INTEGER_CST
10648 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10649 && TREE_INT_CST_HIGH (arg1) == -1)
10650 return fold_convert (type, negate_expr (arg0));
10652 /* Convert -A / -B to A / B when the type is signed and overflow is
10653 undefined. */
10654 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10655 && TREE_CODE (arg0) == NEGATE_EXPR
10656 && negate_expr_p (arg1))
10658 if (INTEGRAL_TYPE_P (type))
10659 fold_overflow_warning (("assuming signed overflow does not occur "
10660 "when distributing negation across "
10661 "division"),
10662 WARN_STRICT_OVERFLOW_MISC);
10663 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10664 negate_expr (arg1));
10666 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10667 && TREE_CODE (arg1) == NEGATE_EXPR
10668 && negate_expr_p (arg0))
10670 if (INTEGRAL_TYPE_P (type))
10671 fold_overflow_warning (("assuming signed overflow does not occur "
10672 "when distributing negation across "
10673 "division"),
10674 WARN_STRICT_OVERFLOW_MISC);
10675 return fold_build2 (code, type, negate_expr (arg0),
10676 TREE_OPERAND (arg1, 0));
10679 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10680 operation, EXACT_DIV_EXPR.
10682 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10683 At one time others generated faster code; it's not clear if they do
10684 after the last round of changes to the DIV code in expmed.c. */
10685 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10686 && multiple_of_p (type, arg0, arg1))
10687 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10689 strict_overflow_p = false;
10690 if (TREE_CODE (arg1) == INTEGER_CST
10691 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10692 &strict_overflow_p)))
10694 if (strict_overflow_p)
10695 fold_overflow_warning (("assuming signed overflow does not occur "
10696 "when simplifying division"),
10697 WARN_STRICT_OVERFLOW_MISC);
10698 return fold_convert (type, tem);
10701 return NULL_TREE;
10703 case CEIL_MOD_EXPR:
10704 case FLOOR_MOD_EXPR:
10705 case ROUND_MOD_EXPR:
10706 case TRUNC_MOD_EXPR:
10707 /* X % 1 is always zero, but be sure to preserve any side
10708 effects in X. */
10709 if (integer_onep (arg1))
10710 return omit_one_operand (type, integer_zero_node, arg0);
10712 /* X % 0, return X % 0 unchanged so that we can get the
10713 proper warnings and errors. */
10714 if (integer_zerop (arg1))
10715 return NULL_TREE;
10717 /* 0 % X is always zero, but be sure to preserve any side
10718 effects in X. Place this after checking for X == 0. */
10719 if (integer_zerop (arg0))
10720 return omit_one_operand (type, integer_zero_node, arg1);
10722 /* X % -1 is zero. */
10723 if (!TYPE_UNSIGNED (type)
10724 && TREE_CODE (arg1) == INTEGER_CST
10725 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10726 && TREE_INT_CST_HIGH (arg1) == -1)
10727 return omit_one_operand (type, integer_zero_node, arg0);
10729 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10730 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10731 strict_overflow_p = false;
10732 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10733 && (TYPE_UNSIGNED (type)
10734 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10736 tree c = arg1;
10737 /* Also optimize A % (C << N) where C is a power of 2,
10738 to A & ((C << N) - 1). */
10739 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10740 c = TREE_OPERAND (arg1, 0);
10742 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10744 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10745 build_int_cst (TREE_TYPE (arg1), 1));
10746 if (strict_overflow_p)
10747 fold_overflow_warning (("assuming signed overflow does not "
10748 "occur when simplifying "
10749 "X % (power of two)"),
10750 WARN_STRICT_OVERFLOW_MISC);
10751 return fold_build2 (BIT_AND_EXPR, type,
10752 fold_convert (type, arg0),
10753 fold_convert (type, mask));
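/* Worked example (editor's sketch): for unsigned x,

       x % 8          folds to   x & 7
       x % (2u << n)  folds to   x & ((2u << n) - 1)

   because a power-of-two modulus keeps only the low bits.  */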
10757 /* X % -C is the same as X % C. */
10758 if (code == TRUNC_MOD_EXPR
10759 && !TYPE_UNSIGNED (type)
10760 && TREE_CODE (arg1) == INTEGER_CST
10761 && !TREE_OVERFLOW (arg1)
10762 && TREE_INT_CST_HIGH (arg1) < 0
10763 && !TYPE_OVERFLOW_TRAPS (type)
10764 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10765 && !sign_bit_p (arg1, arg1))
10766 return fold_build2 (code, type, fold_convert (type, arg0),
10767 fold_convert (type, negate_expr (arg1)));
10769 /* X % -Y is the same as X % Y. */
10770 if (code == TRUNC_MOD_EXPR
10771 && !TYPE_UNSIGNED (type)
10772 && TREE_CODE (arg1) == NEGATE_EXPR
10773 && !TYPE_OVERFLOW_TRAPS (type))
10774 return fold_build2 (code, type, fold_convert (type, arg0),
10775 fold_convert (type, TREE_OPERAND (arg1, 0)));
10777 if (TREE_CODE (arg1) == INTEGER_CST
10778 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10779 &strict_overflow_p)))
10781 if (strict_overflow_p)
10782 fold_overflow_warning (("assuming signed overflow does not occur "
10783 "when simplifying modulos"),
10784 WARN_STRICT_OVERFLOW_MISC);
10785 return fold_convert (type, tem);
10788 return NULL_TREE;
10790 case LROTATE_EXPR:
10791 case RROTATE_EXPR:
10792 if (integer_all_onesp (arg0))
10793 return omit_one_operand (type, arg0, arg1);
10794 goto shift;
10796 case RSHIFT_EXPR:
10797 /* Optimize -1 >> x for arithmetic right shifts. */
10798 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10799 return omit_one_operand (type, arg0, arg1);
10800 /* ... fall through ... */
10802 case LSHIFT_EXPR:
10803 shift:
10804 if (integer_zerop (arg1))
10805 return non_lvalue (fold_convert (type, arg0));
10806 if (integer_zerop (arg0))
10807 return omit_one_operand (type, arg0, arg1);
10809       /* Since a negative shift count is not well-defined,
10810          don't try to compute it in the compiler.  */
10811 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10812 return NULL_TREE;
10814 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10815 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10816 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10817 && host_integerp (TREE_OPERAND (arg0, 1), false)
10818 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10820 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10821 + TREE_INT_CST_LOW (arg1));
10823 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10824 being well defined. */
10825 if (low >= TYPE_PRECISION (type))
10827 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10828 low = low % TYPE_PRECISION (type);
10829 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10830 return build_int_cst (type, 0);
10831 else
10832 low = TYPE_PRECISION (type) - 1;
10835 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10836 build_int_cst (type, low));
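/* Worked example (editor's sketch): two stacked shifts by constants
   combine, e.g. (x << 3) << 5 folds to x << 8, as long as the summed
   count stays below TYPE_PRECISION (type).  */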
10839 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10840 into x & ((unsigned)-1 >> c) for unsigned types. */
10841 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10842 || (TYPE_UNSIGNED (type)
10843 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10844 && host_integerp (arg1, false)
10845 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10846 && host_integerp (TREE_OPERAND (arg0, 1), false)
10847 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10849 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10850 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10851 tree lshift;
10852 tree arg00;
10854 if (low0 == low1)
10856 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10858 lshift = build_int_cst (type, -1);
10859 lshift = int_const_binop (code, lshift, arg1, 0);
10861 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
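/* Worked example (editor's sketch): for a 32-bit unsigned x,

       (x >> 4) << 4   folds to   x & 0xfffffff0

   i.e. x & (-1 << 4), clearing the bits the shift pair discards.  */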
10865 /* Rewrite an LROTATE_EXPR by a constant into an
10866 RROTATE_EXPR by a new constant. */
10867 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10869 tree tem = build_int_cst (TREE_TYPE (arg1),
10870 GET_MODE_BITSIZE (TYPE_MODE (type)));
10871 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10872 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
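/* Worked example (editor's sketch): on a 32-bit mode, a rotate left
   by 5 is canonicalized to a rotate right by 32 - 5 == 27.  */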
10875 /* If we have a rotate of a bit operation with the rotate count and
10876 the second operand of the bit operation both constant,
10877 permute the two operations. */
10878 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10879 && (TREE_CODE (arg0) == BIT_AND_EXPR
10880 || TREE_CODE (arg0) == BIT_IOR_EXPR
10881 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10882 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10883 return fold_build2 (TREE_CODE (arg0), type,
10884 fold_build2 (code, type,
10885 TREE_OPERAND (arg0, 0), arg1),
10886 fold_build2 (code, type,
10887 TREE_OPERAND (arg0, 1), arg1));
10889 /* Two consecutive rotates adding up to the width of the mode can
10890 be ignored. */
10891 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10892 && TREE_CODE (arg0) == RROTATE_EXPR
10893 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10894 && TREE_INT_CST_HIGH (arg1) == 0
10895 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10896 && ((TREE_INT_CST_LOW (arg1)
10897 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10898 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10899 return TREE_OPERAND (arg0, 0);
10901 return NULL_TREE;
10903 case MIN_EXPR:
10904 if (operand_equal_p (arg0, arg1, 0))
10905 return omit_one_operand (type, arg0, arg1);
10906 if (INTEGRAL_TYPE_P (type)
10907 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10908 return omit_one_operand (type, arg1, arg0);
10909 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10910 if (tem)
10911 return tem;
10912 goto associate;
10914 case MAX_EXPR:
10915 if (operand_equal_p (arg0, arg1, 0))
10916 return omit_one_operand (type, arg0, arg1);
10917 if (INTEGRAL_TYPE_P (type)
10918 && TYPE_MAX_VALUE (type)
10919 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10920 return omit_one_operand (type, arg1, arg0);
10921 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10922 if (tem)
10923 return tem;
10924 goto associate;
10926 case TRUTH_ANDIF_EXPR:
10927 /* Note that the operands of this must be ints
10928 and their values must be 0 or 1.
10929 ("true" is a fixed value perhaps depending on the language.) */
10930 /* If first arg is constant zero, return it. */
10931 if (integer_zerop (arg0))
10932 return fold_convert (type, arg0);
10933 case TRUTH_AND_EXPR:
10934 /* If either arg is constant true, drop it. */
10935 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10936 return non_lvalue (fold_convert (type, arg1));
10937 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10938 /* Preserve sequence points. */
10939 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10940 return non_lvalue (fold_convert (type, arg0));
10941 /* If second arg is constant zero, result is zero, but first arg
10942 must be evaluated. */
10943 if (integer_zerop (arg1))
10944 return omit_one_operand (type, arg1, arg0);
10945 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10946 case will be handled here. */
10947 if (integer_zerop (arg0))
10948 return omit_one_operand (type, arg0, arg1);
10950 /* !X && X is always false. */
10951 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10952 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10953 return omit_one_operand (type, integer_zero_node, arg1);
10954 /* X && !X is always false. */
10955 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10956 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10957 return omit_one_operand (type, integer_zero_node, arg0);
10959 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10960 means A >= Y && A != MAX, but in this case we know that
10961 A < X <= MAX. */
10963 if (!TREE_SIDE_EFFECTS (arg0)
10964 && !TREE_SIDE_EFFECTS (arg1))
10966 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10967 if (tem && !operand_equal_p (tem, arg0, 0))
10968 return fold_build2 (code, type, tem, arg1);
10970 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10971 if (tem && !operand_equal_p (tem, arg1, 0))
10972 return fold_build2 (code, type, arg0, tem);
10975 truth_andor:
10976 /* We only do these simplifications if we are optimizing. */
10977 if (!optimize)
10978 return NULL_TREE;
10980 /* Check for things like (A || B) && (A || C). We can convert this
10981 to A || (B && C). Note that either operator can be any of the four
10982 truth and/or operations and the transformation will still be
10983 valid. Also note that we only care about order for the
10984 ANDIF and ORIF operators. If B contains side effects, this
10985 might change the truth-value of A. */
10986 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10987 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10988 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10989 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10990 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10991 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10993 tree a00 = TREE_OPERAND (arg0, 0);
10994 tree a01 = TREE_OPERAND (arg0, 1);
10995 tree a10 = TREE_OPERAND (arg1, 0);
10996 tree a11 = TREE_OPERAND (arg1, 1);
10997 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10998 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10999 && (code == TRUTH_AND_EXPR
11000 || code == TRUTH_OR_EXPR));
11002 if (operand_equal_p (a00, a10, 0))
11003 return fold_build2 (TREE_CODE (arg0), type, a00,
11004 fold_build2 (code, type, a01, a11));
11005 else if (commutative && operand_equal_p (a00, a11, 0))
11006 return fold_build2 (TREE_CODE (arg0), type, a00,
11007 fold_build2 (code, type, a01, a10));
11008 else if (commutative && operand_equal_p (a01, a10, 0))
11009 return fold_build2 (TREE_CODE (arg0), type, a01,
11010 fold_build2 (code, type, a00, a11));
11012         /* This case is tricky because we must either have commutative
11013            operators or else A10 must not have side-effects.  */
11015 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11016 && operand_equal_p (a01, a11, 0))
11017 return fold_build2 (TREE_CODE (arg0), type,
11018 fold_build2 (code, type, a00, a10),
11019 a01);
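/* Worked example (editor's sketch): (a || b) && (a || c) becomes
   a || (b && c).  Per the side-effect check above, this is done only
   when b has no side effects, since b may now be skipped entirely.  */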
11022 /* See if we can build a range comparison. */
11023 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11024 return tem;
11026 /* Check for the possibility of merging component references. If our
11027 lhs is another similar operation, try to merge its rhs with our
11028 rhs. Then try to merge our lhs and rhs. */
11029 if (TREE_CODE (arg0) == code
11030 && 0 != (tem = fold_truthop (code, type,
11031 TREE_OPERAND (arg0, 1), arg1)))
11032 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11034 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11035 return tem;
11037 return NULL_TREE;
11039 case TRUTH_ORIF_EXPR:
11040 /* Note that the operands of this must be ints
11041 and their values must be 0 or true.
11042 ("true" is a fixed value perhaps depending on the language.) */
11043 /* If first arg is constant true, return it. */
11044 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11045 return fold_convert (type, arg0);
11046 case TRUTH_OR_EXPR:
11047 /* If either arg is constant zero, drop it. */
11048 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11049 return non_lvalue (fold_convert (type, arg1));
11050 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11051 /* Preserve sequence points. */
11052 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11053 return non_lvalue (fold_convert (type, arg0));
11054 /* If second arg is constant true, result is true, but we must
11055 evaluate first arg. */
11056 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11057 return omit_one_operand (type, arg1, arg0);
11058 /* Likewise for first arg, but note this only occurs here for
11059 TRUTH_OR_EXPR. */
11060 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11061 return omit_one_operand (type, arg0, arg1);
11063 /* !X || X is always true. */
11064 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11065 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11066 return omit_one_operand (type, integer_one_node, arg1);
11067 /* X || !X is always true. */
11068 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11069 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11070 return omit_one_operand (type, integer_one_node, arg0);
11072 goto truth_andor;
11074 case TRUTH_XOR_EXPR:
11075 /* If the second arg is constant zero, drop it. */
11076 if (integer_zerop (arg1))
11077 return non_lvalue (fold_convert (type, arg0));
11078 /* If the second arg is constant true, this is a logical inversion. */
11079 if (integer_onep (arg1))
11081 /* Only call invert_truthvalue if operand is a truth value. */
11082 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11083 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11084 else
11085 tem = invert_truthvalue (arg0);
11086 return non_lvalue (fold_convert (type, tem));
11088 /* Identical arguments cancel to zero. */
11089 if (operand_equal_p (arg0, arg1, 0))
11090 return omit_one_operand (type, integer_zero_node, arg0);
11092 /* !X ^ X is always true. */
11093 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11094 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11095 return omit_one_operand (type, integer_one_node, arg1);
11097 /* X ^ !X is always true. */
11098 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11099 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11100 return omit_one_operand (type, integer_one_node, arg0);
11102 return NULL_TREE;
11104 case EQ_EXPR:
11105 case NE_EXPR:
11106 tem = fold_comparison (code, type, op0, op1);
11107 if (tem != NULL_TREE)
11108 return tem;
11110 /* bool_var != 0 becomes bool_var. */
11111 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11112 && code == NE_EXPR)
11113 return non_lvalue (fold_convert (type, arg0));
11115 /* bool_var == 1 becomes bool_var. */
11116 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11117 && code == EQ_EXPR)
11118 return non_lvalue (fold_convert (type, arg0));
11120 /* bool_var != 1 becomes !bool_var. */
11121 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11122 && code == NE_EXPR)
11123 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11125 /* bool_var == 0 becomes !bool_var. */
11126 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11127 && code == EQ_EXPR)
11128 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11130 /* If this is an equality comparison of the address of a non-weak
11131 object against zero, then we know the result. */
11132 if (TREE_CODE (arg0) == ADDR_EXPR
11133 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11134 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11135 && integer_zerop (arg1))
11136 return constant_boolean_node (code != EQ_EXPR, type);
11138 /* If this is an equality comparison of the address of two non-weak,
11139 unaliased symbols neither of which are extern (since we do not
11140 have access to attributes for externs), then we know the result. */
11141 if (TREE_CODE (arg0) == ADDR_EXPR
11142 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11143 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11144 && ! lookup_attribute ("alias",
11145 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11146 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11147 && TREE_CODE (arg1) == ADDR_EXPR
11148 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11149 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11150 && ! lookup_attribute ("alias",
11151 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11152 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11154 /* We know that we're looking at the address of two
11155 non-weak, unaliased, static _DECL nodes.
11157 It is both wasteful and incorrect to call operand_equal_p
11158 to compare the two ADDR_EXPR nodes. It is wasteful in that
11159 all we need to do is test pointer equality for the arguments
11160 to the two ADDR_EXPR nodes. It is incorrect to use
11161 operand_equal_p as that function is NOT equivalent to a
11162 C equality test. It can in fact return false for two
11163 objects which would test as equal using the C equality
11164 operator. */
11165 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11166 return constant_boolean_node (equal
11167 ? code == EQ_EXPR : code != EQ_EXPR,
11168 type);
11171 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11172 a MINUS_EXPR of a constant, we can convert it into a comparison with
11173 a revised constant as long as no overflow occurs. */
11174 if (TREE_CODE (arg1) == INTEGER_CST
11175 && (TREE_CODE (arg0) == PLUS_EXPR
11176 || TREE_CODE (arg0) == MINUS_EXPR)
11177 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11178 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11179 ? MINUS_EXPR : PLUS_EXPR,
11180 fold_convert (TREE_TYPE (arg0), arg1),
11181 TREE_OPERAND (arg0, 1), 0))
11182 && !TREE_OVERFLOW (tem))
11183 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
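/* Worked example (editor's sketch): x + 3 == 7 folds to x == 4,
   provided the revised constant 7 - 3 is computed without overflow.  */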
11185 /* Similarly for a NEGATE_EXPR. */
11186 if (TREE_CODE (arg0) == NEGATE_EXPR
11187 && TREE_CODE (arg1) == INTEGER_CST
11188 && 0 != (tem = negate_expr (arg1))
11189 && TREE_CODE (tem) == INTEGER_CST
11190 && !TREE_OVERFLOW (tem))
11191 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11193 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11194 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11195 && TREE_CODE (arg1) == INTEGER_CST
11196 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11197 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11198 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11199 fold_convert (TREE_TYPE (arg0), arg1),
11200 TREE_OPERAND (arg0, 1)));
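/* Worked example (editor's sketch): (x ^ 5) == 3 folds to
   x == (5 ^ 3), i.e. x == 6.  */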
11202 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11203 for !=. Don't do this for ordered comparisons due to overflow. */
11204 if (TREE_CODE (arg0) == MINUS_EXPR
11205 && integer_zerop (arg1))
11206 return fold_build2 (code, type,
11207 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11209 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11210 if (TREE_CODE (arg0) == ABS_EXPR
11211 && (integer_zerop (arg1) || real_zerop (arg1)))
11212 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11214 /* If this is an EQ or NE comparison with zero and ARG0 is
11215 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11216 two operations, but the latter can be done in one less insn
11217 on machines that have only two-operand insns or on which a
11218 constant cannot be the first operand. */
11219 if (TREE_CODE (arg0) == BIT_AND_EXPR
11220 && integer_zerop (arg1))
11222 tree arg00 = TREE_OPERAND (arg0, 0);
11223 tree arg01 = TREE_OPERAND (arg0, 1);
11224 if (TREE_CODE (arg00) == LSHIFT_EXPR
11225 && integer_onep (TREE_OPERAND (arg00, 0)))
11226 return
11227 fold_build2 (code, type,
11228 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11229 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11230 arg01, TREE_OPERAND (arg00, 1)),
11231 fold_convert (TREE_TYPE (arg0),
11232 integer_one_node)),
11233 arg1);
11234 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11235 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11236 return
11237 fold_build2 (code, type,
11238 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11239 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11240 arg00, TREE_OPERAND (arg01, 1)),
11241 fold_convert (TREE_TYPE (arg0),
11242 integer_one_node)),
11243 arg1);
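/* Worked example (editor's sketch): ((1 << n) & flags) != 0 becomes
   ((flags >> n) & 1) != 0, moving the variable shift onto the other
   operand so the mask is the constant 1.  */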
11246 /* If this is an NE or EQ comparison of zero against the result of a
11247 signed MOD operation whose second operand is a power of 2, make
11248 the MOD operation unsigned since it is simpler and equivalent. */
11249 if (integer_zerop (arg1)
11250 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11251 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11252 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11253 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11254 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11255 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11257 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
11258 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11259 fold_convert (newtype,
11260 TREE_OPERAND (arg0, 0)),
11261 fold_convert (newtype,
11262 TREE_OPERAND (arg0, 1)));
11264 return fold_build2 (code, type, newmod,
11265 fold_convert (newtype, arg1));
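/* Worked example (editor's sketch): for signed x,

       x % 4 == 0   folds to   (unsigned) x % 4u == 0

   which is valid because only equality with zero is tested.  */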
11268 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11269 C1 is a valid shift constant, and C2 is a power of two, i.e.
11270 a single bit. */
11271 if (TREE_CODE (arg0) == BIT_AND_EXPR
11272 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11273 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11274 == INTEGER_CST
11275 && integer_pow2p (TREE_OPERAND (arg0, 1))
11276 && integer_zerop (arg1))
11278 tree itype = TREE_TYPE (arg0);
11279 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11280 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11282 /* Check for a valid shift count. */
11283 if (TREE_INT_CST_HIGH (arg001) == 0
11284 && TREE_INT_CST_LOW (arg001) < prec)
11286 tree arg01 = TREE_OPERAND (arg0, 1);
11287 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11288 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11289 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11290 can be rewritten as (X & (C2 << C1)) != 0. */
11291 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11293 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11294 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11295 return fold_build2 (code, type, tem, arg1);
11297 /* Otherwise, for signed (arithmetic) shifts,
11298 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11299 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11300 else if (!TYPE_UNSIGNED (itype))
11301 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11302 arg000, build_int_cst (itype, 0));
11303             /* Otherwise, for unsigned (logical) shifts,
11304                ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11305                ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
11306 else
11307 return omit_one_operand (type,
11308 code == EQ_EXPR ? integer_one_node
11309 : integer_zero_node,
11310 arg000);
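/* Worked example (editor's sketch): ((x >> 3) & 4) != 0 folds to
   (x & 32) != 0, since 4 << 3 == 32 fits in the precision; when the
   shifted constant would overflow, the sign-test and constant-result
   rewrites above are used instead.  */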
11314 /* If this is an NE comparison of zero with an AND of one, remove the
11315 comparison since the AND will give the correct value. */
11316 if (code == NE_EXPR
11317 && integer_zerop (arg1)
11318 && TREE_CODE (arg0) == BIT_AND_EXPR
11319 && integer_onep (TREE_OPERAND (arg0, 1)))
11320 return fold_convert (type, arg0);
11322 /* If we have (A & C) == C where C is a power of 2, convert this into
11323 (A & C) != 0. Similarly for NE_EXPR. */
11324 if (TREE_CODE (arg0) == BIT_AND_EXPR
11325 && integer_pow2p (TREE_OPERAND (arg0, 1))
11326 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11327 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11328 arg0, fold_convert (TREE_TYPE (arg0),
11329 integer_zero_node));
11331 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11332 bit, then fold the expression into A < 0 or A >= 0. */
11333 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11334 if (tem)
11335 return tem;
11337 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11338 Similarly for NE_EXPR. */
11339 if (TREE_CODE (arg0) == BIT_AND_EXPR
11340 && TREE_CODE (arg1) == INTEGER_CST
11341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11343 tree notc = fold_build1 (BIT_NOT_EXPR,
11344 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11345 TREE_OPERAND (arg0, 1));
11346 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11347 arg1, notc);
11348 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11349 if (integer_nonzerop (dandnotc))
11350 return omit_one_operand (type, rslt, arg0);
11353 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11354 Similarly for NE_EXPR. */
11355 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11356 && TREE_CODE (arg1) == INTEGER_CST
11357 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11359 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11360 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11361 TREE_OPERAND (arg0, 1), notd);
11362 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11363 if (integer_nonzerop (candnotd))
11364 return omit_one_operand (type, rslt, arg0);
11367 /* If this is a comparison of a field, we may be able to simplify it. */
11368 if ((TREE_CODE (arg0) == COMPONENT_REF
11369 || TREE_CODE (arg0) == BIT_FIELD_REF)
11370 /* Handle the constant case even without -O
11371 to make sure the warnings are given. */
11372 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11374 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11375 if (t1)
11376 return t1;
11379 /* Optimize comparisons of strlen vs zero to a compare of the
11380 first character of the string vs zero. To wit,
11381 strlen(ptr) == 0 => *ptr == 0
11382 strlen(ptr) != 0 => *ptr != 0
11383 Other cases should reduce to one of these two (or a constant)
11384 due to the return value of strlen being unsigned. */
11385 if (TREE_CODE (arg0) == CALL_EXPR
11386 && integer_zerop (arg1))
11388 tree fndecl = get_callee_fndecl (arg0);
11389 tree arglist;
11391 if (fndecl
11392 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11393 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11394 && (arglist = TREE_OPERAND (arg0, 1))
11395 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
11396 && ! TREE_CHAIN (arglist))
11398 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
11399 return fold_build2 (code, type, iref,
11400 build_int_cst (TREE_TYPE (iref), 0));
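/* Worked example (editor's sketch): strlen (p) == 0 folds to
   *p == 0, replacing the call with a single character load.  */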
11404 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11405 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11406 if (TREE_CODE (arg0) == RSHIFT_EXPR
11407 && integer_zerop (arg1)
11408 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11410 tree arg00 = TREE_OPERAND (arg0, 0);
11411 tree arg01 = TREE_OPERAND (arg0, 1);
11412 tree itype = TREE_TYPE (arg00);
11413 if (TREE_INT_CST_HIGH (arg01) == 0
11414 && TREE_INT_CST_LOW (arg01)
11415 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11417 if (TYPE_UNSIGNED (itype))
11419 itype = lang_hooks.types.signed_type (itype);
11420 arg00 = fold_convert (itype, arg00);
11422 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11423 type, arg00, build_int_cst (itype, 0));
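/* Worked example (editor's sketch): for a 32-bit signed x,

       (x >> 31) != 0   folds to   x < 0

   because the arithmetic shift yields -1 or 0; unsigned operands are
   first converted to the corresponding signed type.  */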
11427 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11428 if (integer_zerop (arg1)
11429 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11430 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11431 TREE_OPERAND (arg0, 1));
11433 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11434 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11435 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11436 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11437 build_int_cst (TREE_TYPE (arg1), 0));
11438 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11439 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11440 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11441 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11442 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11443 build_int_cst (TREE_TYPE (arg1), 0));
11445 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11446 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11447 && TREE_CODE (arg1) == INTEGER_CST
11448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11449 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11450 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11451 TREE_OPERAND (arg0, 1), arg1));
11453 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11454 (X & C) == 0 when C is a single bit. */
11455 if (TREE_CODE (arg0) == BIT_AND_EXPR
11456 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11457 && integer_zerop (arg1)
11458 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11460 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11461 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11462 TREE_OPERAND (arg0, 1));
11463 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11464 type, tem, arg1);
11467 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11468 constant C is a power of two, i.e. a single bit. */
11469 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11470 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11471 && integer_zerop (arg1)
11472 && integer_pow2p (TREE_OPERAND (arg0, 1))
11473 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11474 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11476 tree arg00 = TREE_OPERAND (arg0, 0);
11477 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11478 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11481 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11482          when C is a power of two, i.e. a single bit.  */
11483 if (TREE_CODE (arg0) == BIT_AND_EXPR
11484 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11485 && integer_zerop (arg1)
11486 && integer_pow2p (TREE_OPERAND (arg0, 1))
11487 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11488 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11490 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11491 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11492 arg000, TREE_OPERAND (arg0, 1));
11493 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11494 tem, build_int_cst (TREE_TYPE (tem), 0));
11497 if (integer_zerop (arg1)
11498 && tree_expr_nonzero_p (arg0))
11500           tree res = constant_boolean_node (code == NE_EXPR, type);
11501 return omit_one_operand (type, res, arg0);
11504 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11505 if (TREE_CODE (arg0) == NEGATE_EXPR
11506 && TREE_CODE (arg1) == NEGATE_EXPR)
11507 return fold_build2 (code, type,
11508 TREE_OPERAND (arg0, 0),
11509 TREE_OPERAND (arg1, 0));
11511       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
11512 if (TREE_CODE (arg0) == BIT_AND_EXPR
11513 && TREE_CODE (arg1) == BIT_AND_EXPR)
11515 tree arg00 = TREE_OPERAND (arg0, 0);
11516 tree arg01 = TREE_OPERAND (arg0, 1);
11517 tree arg10 = TREE_OPERAND (arg1, 0);
11518 tree arg11 = TREE_OPERAND (arg1, 1);
11519 tree itype = TREE_TYPE (arg0);
11521 if (operand_equal_p (arg01, arg11, 0))
11522 return fold_build2 (code, type,
11523 fold_build2 (BIT_AND_EXPR, itype,
11524 fold_build2 (BIT_XOR_EXPR, itype,
11525 arg00, arg10),
11526 arg01),
11527 build_int_cst (itype, 0));
11529 if (operand_equal_p (arg01, arg10, 0))
11530 return fold_build2 (code, type,
11531 fold_build2 (BIT_AND_EXPR, itype,
11532 fold_build2 (BIT_XOR_EXPR, itype,
11533 arg00, arg11),
11534 arg01),
11535 build_int_cst (itype, 0));
11537 if (operand_equal_p (arg00, arg11, 0))
11538 return fold_build2 (code, type,
11539 fold_build2 (BIT_AND_EXPR, itype,
11540 fold_build2 (BIT_XOR_EXPR, itype,
11541 arg01, arg10),
11542 arg00),
11543 build_int_cst (itype, 0));
11545 if (operand_equal_p (arg00, arg10, 0))
11546 return fold_build2 (code, type,
11547 fold_build2 (BIT_AND_EXPR, itype,
11548 fold_build2 (BIT_XOR_EXPR, itype,
11549 arg01, arg11),
11550 arg00),
11551 build_int_cst (itype, 0));
11554 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11555 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11557 tree arg00 = TREE_OPERAND (arg0, 0);
11558 tree arg01 = TREE_OPERAND (arg0, 1);
11559 tree arg10 = TREE_OPERAND (arg1, 0);
11560 tree arg11 = TREE_OPERAND (arg1, 1);
11561 tree itype = TREE_TYPE (arg0);
11563 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11564 operand_equal_p guarantees no side-effects so we don't need
11565 to use omit_one_operand on Z. */
11566 if (operand_equal_p (arg01, arg11, 0))
11567 return fold_build2 (code, type, arg00, arg10);
11568 if (operand_equal_p (arg01, arg10, 0))
11569 return fold_build2 (code, type, arg00, arg11);
11570 if (operand_equal_p (arg00, arg11, 0))
11571 return fold_build2 (code, type, arg01, arg10);
11572 if (operand_equal_p (arg00, arg10, 0))
11573 return fold_build2 (code, type, arg01, arg11);
11575 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11576 if (TREE_CODE (arg01) == INTEGER_CST
11577 && TREE_CODE (arg11) == INTEGER_CST)
11578 return fold_build2 (code, type,
11579 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11580 fold_build2 (BIT_XOR_EXPR, itype,
11581 arg01, arg11)),
11582 arg10);
11584 return NULL_TREE;
11586 case LT_EXPR:
11587 case GT_EXPR:
11588 case LE_EXPR:
11589 case GE_EXPR:
11590 tem = fold_comparison (code, type, op0, op1);
11591 if (tem != NULL_TREE)
11592 return tem;
11594 /* Transform comparisons of the form X +- C CMP X. */
11595 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11596 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11597 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11598 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11599 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11600 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11602 tree arg01 = TREE_OPERAND (arg0, 1);
11603 enum tree_code code0 = TREE_CODE (arg0);
11604 int is_positive;
11606 if (TREE_CODE (arg01) == REAL_CST)
11607 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11608 else
11609 is_positive = tree_int_cst_sgn (arg01);
11611 /* (X - c) > X becomes false. */
11612 if (code == GT_EXPR
11613 && ((code0 == MINUS_EXPR && is_positive >= 0)
11614 || (code0 == PLUS_EXPR && is_positive <= 0)))
11616 if (TREE_CODE (arg01) == INTEGER_CST
11617 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11618 fold_overflow_warning (("assuming signed overflow does not "
11619 "occur when assuming that (X - c) > X "
11620 "is always false"),
11621 WARN_STRICT_OVERFLOW_ALL);
11622 return constant_boolean_node (0, type);
11625 /* Likewise (X + c) < X becomes false. */
11626 if (code == LT_EXPR
11627 && ((code0 == PLUS_EXPR && is_positive >= 0)
11628 || (code0 == MINUS_EXPR && is_positive <= 0)))
11630 if (TREE_CODE (arg01) == INTEGER_CST
11631 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11632 fold_overflow_warning (("assuming signed overflow does not "
11633 "occur when assuming that "
11634 "(X + c) < X is always false"),
11635 WARN_STRICT_OVERFLOW_ALL);
11636 return constant_boolean_node (0, type);
11639 /* Convert (X - c) <= X to true. */
11640 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11641 && code == LE_EXPR
11642 && ((code0 == MINUS_EXPR && is_positive >= 0)
11643 || (code0 == PLUS_EXPR && is_positive <= 0)))
11645 if (TREE_CODE (arg01) == INTEGER_CST
11646 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11647 fold_overflow_warning (("assuming signed overflow does not "
11648 "occur when assuming that "
11649 "(X - c) <= X is always true"),
11650 WARN_STRICT_OVERFLOW_ALL);
11651 return constant_boolean_node (1, type);
11654 /* Convert (X + c) >= X to true. */
11655 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11656 && code == GE_EXPR
11657 && ((code0 == PLUS_EXPR && is_positive >= 0)
11658 || (code0 == MINUS_EXPR && is_positive <= 0)))
11660 if (TREE_CODE (arg01) == INTEGER_CST
11661 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11662 fold_overflow_warning (("assuming signed overflow does not "
11663 "occur when assuming that "
11664 "(X + c) >= X is always true"),
11665 WARN_STRICT_OVERFLOW_ALL);
11666 return constant_boolean_node (1, type);
11669 if (TREE_CODE (arg01) == INTEGER_CST)
11671 /* Convert X + c > X and X - c < X to true for integers. */
11672 if (code == GT_EXPR
11673 && ((code0 == PLUS_EXPR && is_positive > 0)
11674 || (code0 == MINUS_EXPR && is_positive < 0)))
11676 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11677 fold_overflow_warning (("assuming signed overflow does "
11678 "not occur when assuming that "
11679 "(X + c) > X is always true"),
11680 WARN_STRICT_OVERFLOW_ALL);
11681 return constant_boolean_node (1, type);
11684 if (code == LT_EXPR
11685 && ((code0 == MINUS_EXPR && is_positive > 0)
11686 || (code0 == PLUS_EXPR && is_positive < 0)))
11688 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11689 fold_overflow_warning (("assuming signed overflow does "
11690 "not occur when assuming that "
11691 "(X - c) < X is always true"),
11692 WARN_STRICT_OVERFLOW_ALL);
11693 return constant_boolean_node (1, type);
11696 /* Convert X + c <= X and X - c >= X to false for integers. */
11697 if (code == LE_EXPR
11698 && ((code0 == PLUS_EXPR && is_positive > 0)
11699 || (code0 == MINUS_EXPR && is_positive < 0)))
11701 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11702 fold_overflow_warning (("assuming signed overflow does "
11703 "not occur when assuming that "
11704 "(X + c) <= X is always false"),
11705 WARN_STRICT_OVERFLOW_ALL);
11706 return constant_boolean_node (0, type);
11709 if (code == GE_EXPR
11710 && ((code0 == MINUS_EXPR && is_positive > 0)
11711 || (code0 == PLUS_EXPR && is_positive < 0)))
11713 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11714 fold_overflow_warning (("assuming signed overflow does "
11715 "not occur when assuming that "
11716 "(X - c) >= X is always true"),
11717 WARN_STRICT_OVERFLOW_ALL);
11718 return constant_boolean_node (0, type);
11723 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11724 This transformation affects the cases which are handled in later
11725 optimizations involving comparisons with non-negative constants. */
11726 if (TREE_CODE (arg1) == INTEGER_CST
11727 && TREE_CODE (arg0) != INTEGER_CST
11728 && tree_int_cst_sgn (arg1) > 0)
11730 if (code == GE_EXPR)
11732 arg1 = const_binop (MINUS_EXPR, arg1,
11733 build_int_cst (TREE_TYPE (arg1), 1), 0);
11734 return fold_build2 (GT_EXPR, type, arg0,
11735 fold_convert (TREE_TYPE (arg0), arg1));
11737 if (code == LT_EXPR)
11739 arg1 = const_binop (MINUS_EXPR, arg1,
11740 build_int_cst (TREE_TYPE (arg1), 1), 0);
11741 return fold_build2 (LE_EXPR, type, arg0,
11742 fold_convert (TREE_TYPE (arg0), arg1));
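/* Worked example (editor's sketch): for integral x, x >= 5 folds to
   x > 4 and x < 5 folds to x <= 4; because the constant is positive,
   the decrement cannot wrap.  */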
11746 /* Comparisons with the highest or lowest possible integer of
11747 the specified precision will have known values. */
11749 tree arg1_type = TREE_TYPE (arg1);
11750 unsigned int width = TYPE_PRECISION (arg1_type);
11752 if (TREE_CODE (arg1) == INTEGER_CST
11753 && !TREE_OVERFLOW (arg1)
11754 && width <= 2 * HOST_BITS_PER_WIDE_INT
11755 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11757 HOST_WIDE_INT signed_max_hi;
11758 unsigned HOST_WIDE_INT signed_max_lo;
11759 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11761 if (width <= HOST_BITS_PER_WIDE_INT)
11763 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11764 - 1;
11765 signed_max_hi = 0;
11766 max_hi = 0;
11768 if (TYPE_UNSIGNED (arg1_type))
11770 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11771 min_lo = 0;
11772 min_hi = 0;
11774 else
11776 max_lo = signed_max_lo;
11777 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11778 min_hi = -1;
11781 else
11783 width -= HOST_BITS_PER_WIDE_INT;
11784 signed_max_lo = -1;
11785 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11786 - 1;
11787 max_lo = -1;
11788 min_lo = 0;
11790 if (TYPE_UNSIGNED (arg1_type))
11792 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11793 min_hi = 0;
11795 else
11797 max_hi = signed_max_hi;
11798 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11802 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11803 && TREE_INT_CST_LOW (arg1) == max_lo)
11804 switch (code)
11806 case GT_EXPR:
11807 return omit_one_operand (type, integer_zero_node, arg0);
11809 case GE_EXPR:
11810 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11812 case LE_EXPR:
11813 return omit_one_operand (type, integer_one_node, arg0);
11815 case LT_EXPR:
11816 return fold_build2 (NE_EXPR, type, arg0, arg1);
11818 /* The GE_EXPR and LT_EXPR cases above are not normally
11819 reached because of previous transformations. */
11821 default:
11822 break;
11824 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11825 == max_hi
11826 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11827 switch (code)
11829 case GT_EXPR:
11830 arg1 = const_binop (PLUS_EXPR, arg1,
11831 build_int_cst (TREE_TYPE (arg1), 1), 0);
11832 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11833 case LE_EXPR:
11834 arg1 = const_binop (PLUS_EXPR, arg1,
11835 build_int_cst (TREE_TYPE (arg1), 1), 0);
11836 return fold_build2 (NE_EXPR, type, arg0, arg1);
11837 default:
11838 break;
11840 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11841 == min_hi
11842 && TREE_INT_CST_LOW (arg1) == min_lo)
11843 switch (code)
11845 case LT_EXPR:
11846 return omit_one_operand (type, integer_zero_node, arg0);
11848 case LE_EXPR:
11849 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11851 case GE_EXPR:
11852 return omit_one_operand (type, integer_one_node, arg0);
11854 case GT_EXPR:
11855 return fold_build2 (NE_EXPR, type, op0, op1);
11857 default:
11858 break;
11860 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11861 == min_hi
11862 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11863 switch (code)
11865 case GE_EXPR:
11866 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11867 return fold_build2 (NE_EXPR, type, arg0, arg1);
11868 case LT_EXPR:
11869 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11870 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11871 default:
11872 break;
11875 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11876 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11877 && TYPE_UNSIGNED (arg1_type)
11878 /* We will flip the signedness of the comparison operator
11879 associated with the mode of arg1, so the sign bit is
11880 specified by this mode. Check that arg1 is the signed
11881 max associated with this sign bit. */
11882 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11883 /* signed_type does not work on pointer types. */
11884 && INTEGRAL_TYPE_P (arg1_type))
11886 /* The following case also applies to X < signed_max+1
11887                  and X >= signed_max+1 because of previous transformations.  */
11888 if (code == LE_EXPR || code == GT_EXPR)
11890 tree st0, st1;
11891 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11892 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11893 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11894 type, fold_convert (st0, arg0),
11895 build_int_cst (st1, 0));
11901 /* If we are comparing an ABS_EXPR with a constant, we can
11902 convert all the cases into explicit comparisons, but they may
11903 well not be faster than doing the ABS and one comparison.
11904 But ABS (X) <= C is a range comparison, which becomes a subtraction
11905 and a comparison, and is probably faster. */
11906 if (code == LE_EXPR
11907 && TREE_CODE (arg1) == INTEGER_CST
11908 && TREE_CODE (arg0) == ABS_EXPR
11909 && ! TREE_SIDE_EFFECTS (arg0)
11910 && (0 != (tem = negate_expr (arg1)))
11911 && TREE_CODE (tem) == INTEGER_CST
11912 && !TREE_OVERFLOW (tem))
11913 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11914 build2 (GE_EXPR, type,
11915 TREE_OPERAND (arg0, 0), tem),
11916 build2 (LE_EXPR, type,
11917 TREE_OPERAND (arg0, 0), arg1));
11919 /* Convert ABS_EXPR<x> >= 0 to true. */
11920 strict_overflow_p = false;
11921 if (code == GE_EXPR
11922 && (integer_zerop (arg1)
11923 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11924 && real_zerop (arg1)))
11925 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11927 if (strict_overflow_p)
11928 fold_overflow_warning (("assuming signed overflow does not occur "
11929 "when simplifying comparison of "
11930 "absolute value and zero"),
11931 WARN_STRICT_OVERFLOW_CONDITIONAL);
11932 return omit_one_operand (type, integer_one_node, arg0);
11935 /* Convert ABS_EXPR<x> < 0 to false. */
11936 strict_overflow_p = false;
11937 if (code == LT_EXPR
11938 && (integer_zerop (arg1) || real_zerop (arg1))
11939 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11941 if (strict_overflow_p)
11942 fold_overflow_warning (("assuming signed overflow does not occur "
11943 "when simplifying comparison of "
11944 "absolute value and zero"),
11945 WARN_STRICT_OVERFLOW_CONDITIONAL);
11946 return omit_one_operand (type, integer_zero_node, arg0);
11949 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11950 and similarly for >= into !=. */
11951 if ((code == LT_EXPR || code == GE_EXPR)
11952 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11953 && TREE_CODE (arg1) == LSHIFT_EXPR
11954 && integer_onep (TREE_OPERAND (arg1, 0)))
11955 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11956 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11957 TREE_OPERAND (arg1, 1)),
11958 build_int_cst (TREE_TYPE (arg0), 0));
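/* Worked example (editor's sketch): for unsigned x,

       x < (1u << y)    folds to   (x >> y) == 0
       x >= (1u << y)   folds to   (x >> y) != 0

   since x < 2**y exactly when the bits above position y are clear.  */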
11960 if ((code == LT_EXPR || code == GE_EXPR)
11961 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11962 && (TREE_CODE (arg1) == NOP_EXPR
11963 || TREE_CODE (arg1) == CONVERT_EXPR)
11964 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11965 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11966 return
11967 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11968 fold_convert (TREE_TYPE (arg0),
11969 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11970 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11971 1))),
11972 build_int_cst (TREE_TYPE (arg0), 0));
11974 return NULL_TREE;
11976 case UNORDERED_EXPR:
11977 case ORDERED_EXPR:
11978 case UNLT_EXPR:
11979 case UNLE_EXPR:
11980 case UNGT_EXPR:
11981 case UNGE_EXPR:
11982 case UNEQ_EXPR:
11983 case LTGT_EXPR:
11984 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11986 t1 = fold_relational_const (code, type, arg0, arg1);
11987 if (t1 != NULL_TREE)
11988 return t1;
11991 /* If the first operand is NaN, the result is constant. */
11992 if (TREE_CODE (arg0) == REAL_CST
11993 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11994 && (code != LTGT_EXPR || ! flag_trapping_math))
11996 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11997 ? integer_zero_node
11998 : integer_one_node;
11999 return omit_one_operand (type, t1, arg1);
12002 /* If the second operand is NaN, the result is constant. */
12003 if (TREE_CODE (arg1) == REAL_CST
12004 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12005 && (code != LTGT_EXPR || ! flag_trapping_math))
12007 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12008 ? integer_zero_node
12009 : integer_one_node;
12010 return omit_one_operand (type, t1, arg0);
12013 /* Simplify unordered comparison of something with itself. */
12014 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12015 && operand_equal_p (arg0, arg1, 0))
12016 return constant_boolean_node (1, type);
12018 if (code == LTGT_EXPR
12019 && !flag_trapping_math
12020 && operand_equal_p (arg0, arg1, 0))
12021 return constant_boolean_node (0, type);
12023 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12025 tree targ0 = strip_float_extensions (arg0);
12026 tree targ1 = strip_float_extensions (arg1);
12027 tree newtype = TREE_TYPE (targ0);
12029 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12030 newtype = TREE_TYPE (targ1);
12032 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12033 return fold_build2 (code, type, fold_convert (newtype, targ0),
12034 fold_convert (newtype, targ1));
12037 return NULL_TREE;
12039 case COMPOUND_EXPR:
12040 /* When pedantic, a compound expression can be neither an lvalue
12041 nor an integer constant expression. */
12042 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12043 return NULL_TREE;
12044       /* Don't let (0, 0) be a null pointer constant.  */
12045 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12046 : fold_convert (type, arg1);
12047 return pedantic_non_lvalue (tem);
12049 case COMPLEX_EXPR:
12050 if ((TREE_CODE (arg0) == REAL_CST
12051 && TREE_CODE (arg1) == REAL_CST)
12052 || (TREE_CODE (arg0) == INTEGER_CST
12053 && TREE_CODE (arg1) == INTEGER_CST))
12054 return build_complex (type, arg0, arg1);
12055 return NULL_TREE;
12057 case ASSERT_EXPR:
12058 /* An ASSERT_EXPR should never be passed to fold_binary. */
12059 gcc_unreachable ();
12061 default:
12062 return NULL_TREE;
12063 } /* switch (code) */
12066 /* Callback for walk_tree, looking for LABEL_EXPR.
12067    Returns *TP if it is a LABEL_EXPR.  Otherwise it returns NULL_TREE.
12068 Do not check the sub-tree of GOTO_EXPR. */
12070 static tree
12071 contains_label_1 (tree *tp,
12072 int *walk_subtrees,
12073 void *data ATTRIBUTE_UNUSED)
12075 switch (TREE_CODE (*tp))
12077 case LABEL_EXPR:
12078 return *tp;
12079 case GOTO_EXPR:
12080 *walk_subtrees = 0;
12081 /* no break */
12082 default:
12083 return NULL_TREE;
12087 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12088    accessible from outside the sub-tree.  Returns false if no
12089    addressable label is found.  */
12091 static bool
12092 contains_label_p (tree st)
12094 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12097 /* Fold a ternary expression of code CODE and type TYPE with operands
12098 OP0, OP1, and OP2. Return the folded expression if folding is
12099 successful. Otherwise, return NULL_TREE. */
12101 tree
12102 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12104 tree tem;
12105 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12106 enum tree_code_class kind = TREE_CODE_CLASS (code);
12108 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12109 && TREE_CODE_LENGTH (code) == 3);
12111 /* Strip any conversions that don't change the mode. This is safe
12112 for every expression, except for a comparison expression because
12113 its signedness is derived from its operands. So, in the latter
12114 case, only strip conversions that don't change the signedness.
12116 Note that this is done as an internal manipulation within the
12117 constant folder, in order to find the simplest representation of
12118 the arguments so that their form can be studied. In any cases,
12119 the appropriate type conversions should be put back in the tree
12120 that will get out of the constant folder. */
12121 if (op0)
12123 arg0 = op0;
12124 STRIP_NOPS (arg0);
12127 if (op1)
12129 arg1 = op1;
12130 STRIP_NOPS (arg1);
12133 switch (code)
12135 case COMPONENT_REF:
12136 if (TREE_CODE (arg0) == CONSTRUCTOR
12137 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12139 unsigned HOST_WIDE_INT idx;
12140 tree field, value;
12141 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12142 if (field == arg1)
12143 return value;
12145 return NULL_TREE;
12147 case COND_EXPR:
12148 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12149 so all simple results must be passed through pedantic_non_lvalue. */
12150 if (TREE_CODE (arg0) == INTEGER_CST)
12152 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12153 tem = integer_zerop (arg0) ? op2 : op1;
12154 /* Only optimize constant conditions when the selected branch
12155 has the same type as the COND_EXPR. This avoids optimizing
12156 away "c ? x : throw", where the throw has a void type.
12157            Avoid throwing away an operand which contains a label.  */
12158 if ((!TREE_SIDE_EFFECTS (unused_op)
12159 || !contains_label_p (unused_op))
12160 && (! VOID_TYPE_P (TREE_TYPE (tem))
12161 || VOID_TYPE_P (type)))
12162 return pedantic_non_lvalue (tem);
12163 return NULL_TREE;
12165 if (operand_equal_p (arg1, op2, 0))
12166 return pedantic_omit_one_operand (type, arg1, arg0);
12168 /* If we have A op B ? A : C, we may be able to convert this to a
12169 simpler expression, depending on the operation and the values
12170 of B and C. Signed zeros prevent all of these transformations,
12171 for reasons given above each one.
12173 Also try swapping the arguments and inverting the conditional. */
12174 if (COMPARISON_CLASS_P (arg0)
12175 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12176 arg1, TREE_OPERAND (arg0, 1))
12177 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12179 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12180 if (tem)
12181 return tem;
12184 if (COMPARISON_CLASS_P (arg0)
12185 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12186 op2,
12187 TREE_OPERAND (arg0, 1))
12188 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12190 tem = fold_truth_not_expr (arg0);
12191 if (tem && COMPARISON_CLASS_P (tem))
12193 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12194 if (tem)
12195 return tem;
12199 /* If the second operand is simpler than the third, swap them
12200 since that produces better jump optimization results. */
12201 if (truth_value_p (TREE_CODE (arg0))
12202 && tree_swap_operands_p (op1, op2, false))
12204 /* See if this can be inverted. If it can't, possibly because
12205 it was a floating-point inequality comparison, don't do
12206 anything. */
12207 tem = fold_truth_not_expr (arg0);
12208 if (tem)
12209 return fold_build3 (code, type, tem, op2, op1);
12212 /* Convert A ? 1 : 0 to simply A. */
12213 if (integer_onep (op1)
12214 && integer_zerop (op2)
12215 /* If we try to convert OP0 to our type, the
12216 call to fold will try to move the conversion inside
12217 a COND, which will recurse. In that case, the COND_EXPR
12218 is probably the best choice, so leave it alone. */
12219 && type == TREE_TYPE (arg0))
12220 return pedantic_non_lvalue (arg0);
12222 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12223 over COND_EXPR in cases such as floating point comparisons. */
12224 if (integer_zerop (op1)
12225 && integer_onep (op2)
12226 && truth_value_p (TREE_CODE (arg0)))
12227 return pedantic_non_lvalue (fold_convert (type,
12228 invert_truthvalue (arg0)));
12230 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12231 if (TREE_CODE (arg0) == LT_EXPR
12232 && integer_zerop (TREE_OPERAND (arg0, 1))
12233 && integer_zerop (op2)
12234 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12236 /* sign_bit_p only checks ARG1 bits within A's precision.
12237 If <sign bit of A> has wider type than A, bits outside
12238 of A's precision in <sign bit of A> need to be checked.
12239            If they are all 0, this optimization needs to be done
12240            in unsigned A's type; if they are all 1, in signed A's type;
12241            otherwise this can't be done.  */
12242 if (TYPE_PRECISION (TREE_TYPE (tem))
12243 < TYPE_PRECISION (TREE_TYPE (arg1))
12244 && TYPE_PRECISION (TREE_TYPE (tem))
12245 < TYPE_PRECISION (type))
12247 unsigned HOST_WIDE_INT mask_lo;
12248 HOST_WIDE_INT mask_hi;
12249 int inner_width, outer_width;
12250 tree tem_type;
12252 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12253 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12254 if (outer_width > TYPE_PRECISION (type))
12255 outer_width = TYPE_PRECISION (type);
12257 if (outer_width > HOST_BITS_PER_WIDE_INT)
12259 mask_hi = ((unsigned HOST_WIDE_INT) -1
12260 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12261 mask_lo = -1;
12263 else
12265 mask_hi = 0;
12266 mask_lo = ((unsigned HOST_WIDE_INT) -1
12267 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12269 if (inner_width > HOST_BITS_PER_WIDE_INT)
12271 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12272 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12273 mask_lo = 0;
12275 else
12276 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12277 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12279 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12280 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12282 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
12283 tem = fold_convert (tem_type, tem);
12285 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12286 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12288 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
12289 tem = fold_convert (tem_type, tem);
12291 else
12292 tem = NULL;
12295 if (tem)
12296 return fold_convert (type,
12297 fold_build2 (BIT_AND_EXPR,
12298 TREE_TYPE (tem), tem,
12299 fold_convert (TREE_TYPE (tem),
12300 arg1)));
12303 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12304 already handled above. */
12305 if (TREE_CODE (arg0) == BIT_AND_EXPR
12306 && integer_onep (TREE_OPERAND (arg0, 1))
12307 && integer_zerop (op2)
12308 && integer_pow2p (arg1))
12310 tree tem = TREE_OPERAND (arg0, 0);
12311 STRIP_NOPS (tem);
12312 if (TREE_CODE (tem) == RSHIFT_EXPR
12313 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12314 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12315 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12316 return fold_build2 (BIT_AND_EXPR, type,
12317 TREE_OPERAND (tem, 0), arg1);
12320 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12321 is probably obsolete because the first operand should be a
12322 truth value (that's why we have the two cases above), but let's
12323 leave it in until we can confirm this for all front-ends. */
12324 if (integer_zerop (op2)
12325 && TREE_CODE (arg0) == NE_EXPR
12326 && integer_zerop (TREE_OPERAND (arg0, 1))
12327 && integer_pow2p (arg1)
12328 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12329 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12330 arg1, OEP_ONLY_CONST))
12331 return pedantic_non_lvalue (fold_convert (type,
12332 TREE_OPERAND (arg0, 0)));
12334 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12335 if (integer_zerop (op2)
12336 && truth_value_p (TREE_CODE (arg0))
12337 && truth_value_p (TREE_CODE (arg1)))
12338 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12339 fold_convert (type, arg0),
12340 arg1);
12342 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12343 if (integer_onep (op2)
12344 && truth_value_p (TREE_CODE (arg0))
12345 && truth_value_p (TREE_CODE (arg1)))
12347 /* Only perform transformation if ARG0 is easily inverted. */
12348 tem = fold_truth_not_expr (arg0);
12349 if (tem)
12350 return fold_build2 (TRUTH_ORIF_EXPR, type,
12351 fold_convert (type, tem),
12352 arg1);
12355 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12356 if (integer_zerop (arg1)
12357 && truth_value_p (TREE_CODE (arg0))
12358 && truth_value_p (TREE_CODE (op2)))
12360 /* Only perform transformation if ARG0 is easily inverted. */
12361 tem = fold_truth_not_expr (arg0);
12362 if (tem)
12363 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12364 fold_convert (type, tem),
12365 op2);
12368 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12369 if (integer_onep (arg1)
12370 && truth_value_p (TREE_CODE (arg0))
12371 && truth_value_p (TREE_CODE (op2)))
12372 return fold_build2 (TRUTH_ORIF_EXPR, type,
12373 fold_convert (type, arg0),
12374 op2);
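/* To summarize the COND_EXPR conversions above in source notation
   (an illustrative recap, not additional transformations):

     a ? 1 : 0  =>  a          when the types already agree
     a ? 0 : 1  =>  !a
     a ? b : 0  =>  a && b     a and b truth values
     a ? b : 1  =>  !a || b    when a is easily inverted
     a ? 0 : b  =>  !a && b    when a is easily inverted
     a ? 1 : b  =>  a || b  */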
12376 return NULL_TREE;
12378 case CALL_EXPR:
12379 /* Check for a built-in function. */
12380 if (TREE_CODE (op0) == ADDR_EXPR
12381 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
12382 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
12383 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
12384 return NULL_TREE;
12386 case BIT_FIELD_REF:
12387 if (TREE_CODE (arg0) == VECTOR_CST
12388 && type == TREE_TYPE (TREE_TYPE (arg0))
12389 && host_integerp (arg1, 1)
12390 && host_integerp (op2, 1))
12392 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12393 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12395 if (width != 0
12396 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12397 && (idx % width) == 0
12398 && (idx = idx / width)
12399 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12401 tree elements = TREE_VECTOR_CST_ELTS (arg0);
12402 while (idx-- > 0 && elements)
12403 elements = TREE_CHAIN (elements);
12404 if (elements)
12405 return TREE_VALUE (elements);
12406 else
12407 return fold_convert (type, integer_zero_node);
12410 return NULL_TREE;
12412 default:
12413 return NULL_TREE;
12414 } /* switch (code) */
12417 /* Perform constant folding and related simplification of EXPR.
12418 The related simplifications include x*1 => x, x*0 => 0, etc.,
12419 and application of the associative law.
12420 NOP_EXPR conversions may be removed freely (as long as we
12421 are careful not to change the type of the overall expression).
12422 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12423 but we can constant-fold them if they have constant operands. */
12425 #ifdef ENABLE_FOLD_CHECKING
12426 # define fold(x) fold_1 (x)
12427 static tree fold_1 (tree);
12428 static
12429 #endif
12430 tree
12431 fold (tree expr)
12433 const tree t = expr;
12434 enum tree_code code = TREE_CODE (t);
12435 enum tree_code_class kind = TREE_CODE_CLASS (code);
12436 tree tem;
12438 /* Return right away if a constant. */
12439 if (kind == tcc_constant)
12440 return t;
12442 if (IS_EXPR_CODE_CLASS (kind)
12443 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12445 tree type = TREE_TYPE (t);
12446 tree op0, op1, op2;
12448 switch (TREE_CODE_LENGTH (code))
12450 case 1:
12451 op0 = TREE_OPERAND (t, 0);
12452 tem = fold_unary (code, type, op0);
12453 return tem ? tem : expr;
12454 case 2:
12455 op0 = TREE_OPERAND (t, 0);
12456 op1 = TREE_OPERAND (t, 1);
12457 tem = fold_binary (code, type, op0, op1);
12458 return tem ? tem : expr;
12459 case 3:
12460 op0 = TREE_OPERAND (t, 0);
12461 op1 = TREE_OPERAND (t, 1);
12462 op2 = TREE_OPERAND (t, 2);
12463 tem = fold_ternary (code, type, op0, op1, op2);
12464 return tem ? tem : expr;
12465 default:
12466 break;
12470 switch (code)
12472 case CONST_DECL:
12473 return fold (DECL_INITIAL (t));
12475 default:
12476 return t;
12477 } /* switch (code) */
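/* Illustrative use of fold (a sketch; the variable name is
   hypothetical, not part of this file):

     tree sum = build2 (PLUS_EXPR, integer_type_node,
                        build_int_cst (integer_type_node, 2),
                        build_int_cst (integer_type_node, 3));
     sum = fold (sum);

   SUM is now an INTEGER_CST with value 5.  New code normally calls
   the fold_buildN entry points below instead of building the unfolded
   node first.  */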
12480 #ifdef ENABLE_FOLD_CHECKING
12481 #undef fold
12483 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12484 static void fold_check_failed (tree, tree);
12485 void print_fold_checksum (tree);
12487 /* When --enable-checking=fold, compute a digest of EXPR before
12488    and after the actual fold call to verify that fold did not
12489    accidentally change the original EXPR.  */
12491 tree
12492 fold (tree expr)
12494 tree ret;
12495 struct md5_ctx ctx;
12496 unsigned char checksum_before[16], checksum_after[16];
12497 htab_t ht;
12499 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12500 md5_init_ctx (&ctx);
12501 fold_checksum_tree (expr, &ctx, ht);
12502 md5_finish_ctx (&ctx, checksum_before);
12503 htab_empty (ht);
12505 ret = fold_1 (expr);
12507 md5_init_ctx (&ctx);
12508 fold_checksum_tree (expr, &ctx, ht);
12509 md5_finish_ctx (&ctx, checksum_after);
12510 htab_delete (ht);
12512 if (memcmp (checksum_before, checksum_after, 16))
12513 fold_check_failed (expr, ret);
12515 return ret;
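/* The digests above follow libiberty's md5.h interface; the general
   pattern, shown as a sketch with hypothetical DATA and LEN, is:

     struct md5_ctx ctx;
     unsigned char digest[16];

     md5_init_ctx (&ctx);
     md5_process_bytes (data, len, &ctx);
     md5_finish_ctx (&ctx, digest);

   fold_checksum_tree below feeds each reachable tree node through
   md5_process_bytes once, using the hash table to avoid revisiting
   shared subtrees.  */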
12518 void
12519 print_fold_checksum (tree expr)
12521 struct md5_ctx ctx;
12522 unsigned char checksum[16], cnt;
12523 htab_t ht;
12525 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12526 md5_init_ctx (&ctx);
12527 fold_checksum_tree (expr, &ctx, ht);
12528 md5_finish_ctx (&ctx, checksum);
12529 htab_delete (ht);
12530 for (cnt = 0; cnt < 16; ++cnt)
12531 fprintf (stderr, "%02x", checksum[cnt]);
12532 putc ('\n', stderr);
12535 static void
12536 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12538 internal_error ("fold check: original tree changed by fold");
12541 static void
12542 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12544 void **slot;
12545 enum tree_code code;
12546 struct tree_function_decl buf;
12547 int i, len;
12549 recursive_label:
12551 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12552 <= sizeof (struct tree_function_decl))
12553 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12554 if (expr == NULL)
12555 return;
12556 slot = htab_find_slot (ht, expr, INSERT);
12557 if (*slot != NULL)
12558 return;
12559 *slot = expr;
12560 code = TREE_CODE (expr);
12561 if (TREE_CODE_CLASS (code) == tcc_declaration
12562 && DECL_ASSEMBLER_NAME_SET_P (expr))
12564 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12565 memcpy ((char *) &buf, expr, tree_size (expr));
12566 expr = (tree) &buf;
12567 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12569 else if (TREE_CODE_CLASS (code) == tcc_type
12570 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12571 || TYPE_CACHED_VALUES_P (expr)
12572 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12574 /* Allow these fields to be modified. */
12575 memcpy ((char *) &buf, expr, tree_size (expr));
12576 expr = (tree) &buf;
12577 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12578 TYPE_POINTER_TO (expr) = NULL;
12579 TYPE_REFERENCE_TO (expr) = NULL;
12580 if (TYPE_CACHED_VALUES_P (expr))
12582 TYPE_CACHED_VALUES_P (expr) = 0;
12583 TYPE_CACHED_VALUES (expr) = NULL;
12586 md5_process_bytes (expr, tree_size (expr), ctx);
12587 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12588 if (TREE_CODE_CLASS (code) != tcc_type
12589 && TREE_CODE_CLASS (code) != tcc_declaration
12590 && code != TREE_LIST)
12591 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12592 switch (TREE_CODE_CLASS (code))
12594 case tcc_constant:
12595 switch (code)
12597 case STRING_CST:
12598 md5_process_bytes (TREE_STRING_POINTER (expr),
12599 TREE_STRING_LENGTH (expr), ctx);
12600 break;
12601 case COMPLEX_CST:
12602 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12603 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12604 break;
12605 case VECTOR_CST:
12606 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12607 break;
12608 default:
12609 break;
12611 break;
12612 case tcc_exceptional:
12613 switch (code)
12615 case TREE_LIST:
12616 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12617 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12618 expr = TREE_CHAIN (expr);
12619 goto recursive_label;
12620 break;
12621 case TREE_VEC:
12622 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12623 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12624 break;
12625 default:
12626 break;
12628 break;
12629 case tcc_expression:
12630 case tcc_reference:
12631 case tcc_comparison:
12632 case tcc_unary:
12633 case tcc_binary:
12634 case tcc_statement:
12635 len = TREE_CODE_LENGTH (code);
12636 for (i = 0; i < len; ++i)
12637 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12638 break;
12639 case tcc_declaration:
12640 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12641 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12642 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12644 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12645 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12646 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12647 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12648 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12650 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12651 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12653 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12655 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12656 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12657 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12659 break;
12660 case tcc_type:
12661 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12662 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12663 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12664 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12665 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12666 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12667 if (INTEGRAL_TYPE_P (expr)
12668 || SCALAR_FLOAT_TYPE_P (expr))
12670 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12671 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12673 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12674 if (TREE_CODE (expr) == RECORD_TYPE
12675 || TREE_CODE (expr) == UNION_TYPE
12676 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12677 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12678 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12679 break;
12680 default:
12681 break;
12685 #endif
12687 /* Fold a unary tree expression with code CODE of type TYPE with an
12688 operand OP0. Return a folded expression if successful. Otherwise,
12689 return a tree expression with code CODE of type TYPE with an
12690 operand OP0. */
12692 tree
12693 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12695 tree tem;
12696 #ifdef ENABLE_FOLD_CHECKING
12697 unsigned char checksum_before[16], checksum_after[16];
12698 struct md5_ctx ctx;
12699 htab_t ht;
12701 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12702 md5_init_ctx (&ctx);
12703 fold_checksum_tree (op0, &ctx, ht);
12704 md5_finish_ctx (&ctx, checksum_before);
12705 htab_empty (ht);
12706 #endif
12708 tem = fold_unary (code, type, op0);
12709 if (!tem)
12710 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12712 #ifdef ENABLE_FOLD_CHECKING
12713 md5_init_ctx (&ctx);
12714 fold_checksum_tree (op0, &ctx, ht);
12715 md5_finish_ctx (&ctx, checksum_after);
12716 htab_delete (ht);
12718 if (memcmp (checksum_before, checksum_after, 16))
12719 fold_check_failed (op0, tem);
12720 #endif
12721 return tem;
12724 /* Fold a binary tree expression with code CODE of type TYPE with
12725 operands OP0 and OP1. Return a folded expression if successful.
12726 Otherwise, return a tree expression with code CODE of type TYPE
12727 with operands OP0 and OP1. */
12729 tree
12730 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12731 MEM_STAT_DECL)
12733 tree tem;
12734 #ifdef ENABLE_FOLD_CHECKING
12735 unsigned char checksum_before_op0[16],
12736 checksum_before_op1[16],
12737 checksum_after_op0[16],
12738 checksum_after_op1[16];
12739 struct md5_ctx ctx;
12740 htab_t ht;
12742 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12743 md5_init_ctx (&ctx);
12744 fold_checksum_tree (op0, &ctx, ht);
12745 md5_finish_ctx (&ctx, checksum_before_op0);
12746 htab_empty (ht);
12748 md5_init_ctx (&ctx);
12749 fold_checksum_tree (op1, &ctx, ht);
12750 md5_finish_ctx (&ctx, checksum_before_op1);
12751 htab_empty (ht);
12752 #endif
12754 tem = fold_binary (code, type, op0, op1);
12755 if (!tem)
12756 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12758 #ifdef ENABLE_FOLD_CHECKING
12759 md5_init_ctx (&ctx);
12760 fold_checksum_tree (op0, &ctx, ht);
12761 md5_finish_ctx (&ctx, checksum_after_op0);
12762 htab_empty (ht);
12764 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12765 fold_check_failed (op0, tem);
12767 md5_init_ctx (&ctx);
12768 fold_checksum_tree (op1, &ctx, ht);
12769 md5_finish_ctx (&ctx, checksum_after_op1);
12770 htab_delete (ht);
12772 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12773 fold_check_failed (op1, tem);
12774 #endif
12775 return tem;
12778 /* Fold a ternary tree expression with code CODE of type TYPE with
12779 operands OP0, OP1, and OP2. Return a folded expression if
12780 successful. Otherwise, return a tree expression with code CODE of
12781 type TYPE with operands OP0, OP1, and OP2. */
12783 tree
12784 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12785 MEM_STAT_DECL)
12787 tree tem;
12788 #ifdef ENABLE_FOLD_CHECKING
12789 unsigned char checksum_before_op0[16],
12790 checksum_before_op1[16],
12791 checksum_before_op2[16],
12792 checksum_after_op0[16],
12793 checksum_after_op1[16],
12794 checksum_after_op2[16];
12795 struct md5_ctx ctx;
12796 htab_t ht;
12798 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12799 md5_init_ctx (&ctx);
12800 fold_checksum_tree (op0, &ctx, ht);
12801 md5_finish_ctx (&ctx, checksum_before_op0);
12802 htab_empty (ht);
12804 md5_init_ctx (&ctx);
12805 fold_checksum_tree (op1, &ctx, ht);
12806 md5_finish_ctx (&ctx, checksum_before_op1);
12807 htab_empty (ht);
12809 md5_init_ctx (&ctx);
12810 fold_checksum_tree (op2, &ctx, ht);
12811 md5_finish_ctx (&ctx, checksum_before_op2);
12812 htab_empty (ht);
12813 #endif
12815 tem = fold_ternary (code, type, op0, op1, op2);
12816 if (!tem)
12817 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12819 #ifdef ENABLE_FOLD_CHECKING
12820 md5_init_ctx (&ctx);
12821 fold_checksum_tree (op0, &ctx, ht);
12822 md5_finish_ctx (&ctx, checksum_after_op0);
12823 htab_empty (ht);
12825 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12826 fold_check_failed (op0, tem);
12828 md5_init_ctx (&ctx);
12829 fold_checksum_tree (op1, &ctx, ht);
12830 md5_finish_ctx (&ctx, checksum_after_op1);
12831 htab_empty (ht);
12833 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12834 fold_check_failed (op1, tem);
12836 md5_init_ctx (&ctx);
12837 fold_checksum_tree (op2, &ctx, ht);
12838 md5_finish_ctx (&ctx, checksum_after_op2);
12839 htab_delete (ht);
12841 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12842 fold_check_failed (op2, tem);
12843 #endif
12844 return tem;
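/* Typical use of the fold_buildN entry points (illustrative):

     t = fold_build2 (PLUS_EXPR, type, op0, op1);

   This is roughly build2 followed by fold, except that the unfolded
   node is constructed only when no simplification applies, and, under
   ENABLE_FOLD_CHECKING, the operands are verified to be unmodified by
   the folding.  */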
12847 /* Perform constant folding and related simplification of initializer
12848 expression EXPR. These behave identically to "fold_buildN" but ignore
12849 potential run-time traps and exceptions that fold must preserve. */
12851 #define START_FOLD_INIT \
12852 int saved_signaling_nans = flag_signaling_nans;\
12853 int saved_trapping_math = flag_trapping_math;\
12854 int saved_rounding_math = flag_rounding_math;\
12855 int saved_trapv = flag_trapv;\
12856 int saved_folding_initializer = folding_initializer;\
12857 flag_signaling_nans = 0;\
12858 flag_trapping_math = 0;\
12859 flag_rounding_math = 0;\
12860 flag_trapv = 0;\
12861 folding_initializer = 1;
12863 #define END_FOLD_INIT \
12864 flag_signaling_nans = saved_signaling_nans;\
12865 flag_trapping_math = saved_trapping_math;\
12866 flag_rounding_math = saved_rounding_math;\
12867 flag_trapv = saved_trapv;\
12868 folding_initializer = saved_folding_initializer;
12870 tree
12871 fold_build1_initializer (enum tree_code code, tree type, tree op)
12873 tree result;
12874 START_FOLD_INIT;
12876 result = fold_build1 (code, type, op);
12878 END_FOLD_INIT;
12879 return result;
12882 tree
12883 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12885 tree result;
12886 START_FOLD_INIT;
12888 result = fold_build2 (code, type, op0, op1);
12890 END_FOLD_INIT;
12891 return result;
12894 tree
12895 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12896 tree op2)
12898 tree result;
12899 START_FOLD_INIT;
12901 result = fold_build3 (code, type, op0, op1, op2);
12903 END_FOLD_INIT;
12904 return result;
12907 #undef START_FOLD_INIT
12908 #undef END_FOLD_INIT
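/* Example (illustrative): when folding an initializer such as

     static double d = 1.0 / 3.0;

   the wrappers above temporarily clear flag_signaling_nans,
   flag_trapping_math, flag_rounding_math and flag_trapv, so the
   division can be evaluated at compile time even though fold would
   otherwise have to preserve its possible run-time rounding or
   trapping behavior.  */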
12910 /* Determine if the first argument is a multiple of the second.  Return 0 if
12911    it is not, or we cannot easily determine it to be.
12913 An example of the sort of thing we care about (at this point; this routine
12914 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12915 fold cases do now) is discovering that
12917 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12919 is a multiple of
12921 SAVE_EXPR (J * 8)
12923 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12925 This code also handles discovering that
12927 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12929 is a multiple of 8 so we don't have to worry about dealing with a
12930 possible remainder.
12932 Note that we *look* inside a SAVE_EXPR only to determine how it was
12933 calculated; it is not safe for fold to do much of anything else with the
12934 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12935 at run time. For example, the latter example above *cannot* be implemented
12936 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12937 evaluation time of the original SAVE_EXPR is not necessarily the same at
12938 the time the new expression is evaluated. The only optimization of this
12939 sort that would be valid is changing
12941 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12943 divided by 8 to
12945 SAVE_EXPR (I) * SAVE_EXPR (J)
12947 (where the same SAVE_EXPR (J) is used in the original and the
12948 transformed version). */
12950 static int
12951 multiple_of_p (tree type, tree top, tree bottom)
12953 if (operand_equal_p (top, bottom, 0))
12954 return 1;
12956 if (TREE_CODE (type) != INTEGER_TYPE)
12957 return 0;
12959 switch (TREE_CODE (top))
12961 case BIT_AND_EXPR:
12962 /* Bitwise and provides a power of two multiple. If the mask is
12963 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12964 if (!integer_pow2p (bottom))
12965 return 0;
12966 /* FALLTHRU */
12968 case MULT_EXPR:
12969 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12970 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12972 case PLUS_EXPR:
12973 case MINUS_EXPR:
12974 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12975 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12977 case LSHIFT_EXPR:
12978 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12980 tree op1, t1;
12982 op1 = TREE_OPERAND (top, 1);
12983 /* const_binop may not detect overflow correctly,
12984 so check for it explicitly here. */
12985 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12986 > TREE_INT_CST_LOW (op1)
12987 && TREE_INT_CST_HIGH (op1) == 0
12988 && 0 != (t1 = fold_convert (type,
12989 const_binop (LSHIFT_EXPR,
12990 size_one_node,
12991 op1, 0)))
12992 && !TREE_OVERFLOW (t1))
12993 return multiple_of_p (type, t1, bottom);
12995 return 0;
12997 case NOP_EXPR:
12998 /* Can't handle conversions from non-integral or wider integral type. */
12999 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13000 || (TYPE_PRECISION (type)
13001 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13002 return 0;
13004       /* ... fall through ... */
13006 case SAVE_EXPR:
13007 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13009 case INTEGER_CST:
13010 if (TREE_CODE (bottom) != INTEGER_CST
13011 || (TYPE_UNSIGNED (type)
13012 && (tree_int_cst_sgn (top) < 0
13013 || tree_int_cst_sgn (bottom) < 0)))
13014 return 0;
13015 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13016 top, bottom, 0));
13018 default:
13019 return 0;
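/* Examples (illustrative), with TYPE a suitable integer type:

     multiple_of_p (type, J * 8, 8)          returns 1  (MULT_EXPR case)
     multiple_of_p (type, I * 4 + J * 8, 4)  returns 1  (PLUS_EXPR case)
     multiple_of_p (type, I | 16, 8)         returns 0  (unhandled code)

   A zero result means only "not easily determined", never "known not
   to be a multiple".  */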
13023 /* Return true if `t' is known to be non-negative. If the return
13024 value is based on the assumption that signed overflow is undefined,
13025 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13026 *STRICT_OVERFLOW_P. */
13028 bool
13029 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13031 if (t == error_mark_node)
13032 return false;
13034 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13035 return true;
13037 switch (TREE_CODE (t))
13039 case SSA_NAME:
13040 /* Query VRP to see if it has recorded any information about
13041 the range of this object. */
13042 return ssa_name_nonnegative_p (t);
13044 case ABS_EXPR:
13045       /* We can't return 1 when signed overflow wraps, because
13046          ABS_EXPR<INT_MIN> = INT_MIN.  */
13047 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13048 return true;
13049 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13051 *strict_overflow_p = true;
13052 return true;
13054 break;
13056 case INTEGER_CST:
13057 return tree_int_cst_sgn (t) >= 0;
13059 case REAL_CST:
13060 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13062 case PLUS_EXPR:
13063 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13064 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13065 strict_overflow_p)
13066 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13067 strict_overflow_p));
13069 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13070 both unsigned and at least 2 bits shorter than the result. */
13071 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13072 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13073 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13075 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13076 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13077 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13078 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13080 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13081 TYPE_PRECISION (inner2)) + 1;
13082 return prec < TYPE_PRECISION (TREE_TYPE (t));
13085 break;
13087 case MULT_EXPR:
13088 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13090 /* x * x for floating point x is always non-negative. */
13091 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13092 return true;
13093 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13094 strict_overflow_p)
13095 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13096 strict_overflow_p));
13099 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13100          both unsigned and their combined precision is less than that of the result.  */
13101 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13102 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13103 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13105 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13106 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13107 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13108 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13109 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13110 < TYPE_PRECISION (TREE_TYPE (t));
13112 return false;
13114 case BIT_AND_EXPR:
13115 case MAX_EXPR:
13116 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13117 strict_overflow_p)
13118 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13119 strict_overflow_p));
13121 case BIT_IOR_EXPR:
13122 case BIT_XOR_EXPR:
13123 case MIN_EXPR:
13124 case RDIV_EXPR:
13125 case TRUNC_DIV_EXPR:
13126 case CEIL_DIV_EXPR:
13127 case FLOOR_DIV_EXPR:
13128 case ROUND_DIV_EXPR:
13129 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13130 strict_overflow_p)
13131 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13132 strict_overflow_p));
13134 case TRUNC_MOD_EXPR:
13135 case CEIL_MOD_EXPR:
13136 case FLOOR_MOD_EXPR:
13137 case ROUND_MOD_EXPR:
13138 case SAVE_EXPR:
13139 case NON_LVALUE_EXPR:
13140 case FLOAT_EXPR:
13141 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13142 strict_overflow_p);
13144 case COMPOUND_EXPR:
13145 case MODIFY_EXPR:
13146 case GIMPLE_MODIFY_STMT:
13147 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13148 strict_overflow_p);
13150 case BIND_EXPR:
13151 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13152 strict_overflow_p);
13154 case COND_EXPR:
13155 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13156 strict_overflow_p)
13157 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13158 strict_overflow_p));
13160 case NOP_EXPR:
13162 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13163 tree outer_type = TREE_TYPE (t);
13165 if (TREE_CODE (outer_type) == REAL_TYPE)
13167 if (TREE_CODE (inner_type) == REAL_TYPE)
13168 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13169 strict_overflow_p);
13170 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13172 if (TYPE_UNSIGNED (inner_type))
13173 return true;
13174 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13175 strict_overflow_p);
13178 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13180 if (TREE_CODE (inner_type) == REAL_TYPE)
13181             return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13182 strict_overflow_p);
13183 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13184 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13185 && TYPE_UNSIGNED (inner_type);
13188 break;
13190 case TARGET_EXPR:
13192 tree temp = TARGET_EXPR_SLOT (t);
13193 t = TARGET_EXPR_INITIAL (t);
13195 /* If the initializer is non-void, then it's a normal expression
13196 that will be assigned to the slot. */
13197 if (!VOID_TYPE_P (t))
13198 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13200 /* Otherwise, the initializer sets the slot in some way. One common
13201 way is an assignment statement at the end of the initializer. */
13202 while (1)
13204 if (TREE_CODE (t) == BIND_EXPR)
13205 t = expr_last (BIND_EXPR_BODY (t));
13206 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13207 || TREE_CODE (t) == TRY_CATCH_EXPR)
13208 t = expr_last (TREE_OPERAND (t, 0));
13209 else if (TREE_CODE (t) == STATEMENT_LIST)
13210 t = expr_last (t);
13211 else
13212 break;
13214 if ((TREE_CODE (t) == MODIFY_EXPR
13215 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13216 && GENERIC_TREE_OPERAND (t, 0) == temp)
13217 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13218 strict_overflow_p);
13220 return false;
13223 case CALL_EXPR:
13225 tree fndecl = get_callee_fndecl (t);
13226 tree arglist = TREE_OPERAND (t, 1);
13227 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13228 switch (DECL_FUNCTION_CODE (fndecl))
13230 CASE_FLT_FN (BUILT_IN_ACOS):
13231 CASE_FLT_FN (BUILT_IN_ACOSH):
13232 CASE_FLT_FN (BUILT_IN_CABS):
13233 CASE_FLT_FN (BUILT_IN_COSH):
13234 CASE_FLT_FN (BUILT_IN_ERFC):
13235 CASE_FLT_FN (BUILT_IN_EXP):
13236 CASE_FLT_FN (BUILT_IN_EXP10):
13237 CASE_FLT_FN (BUILT_IN_EXP2):
13238 CASE_FLT_FN (BUILT_IN_FABS):
13239 CASE_FLT_FN (BUILT_IN_FDIM):
13240 CASE_FLT_FN (BUILT_IN_HYPOT):
13241 CASE_FLT_FN (BUILT_IN_POW10):
13242 CASE_INT_FN (BUILT_IN_FFS):
13243 CASE_INT_FN (BUILT_IN_PARITY):
13244 CASE_INT_FN (BUILT_IN_POPCOUNT):
13245 case BUILT_IN_BSWAP32:
13246 case BUILT_IN_BSWAP64:
13247 /* Always true. */
13248 return true;
13250 CASE_FLT_FN (BUILT_IN_SQRT):
13251 /* sqrt(-0.0) is -0.0. */
13252 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13253 return true;
13254 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
13255 strict_overflow_p);
13257 CASE_FLT_FN (BUILT_IN_ASINH):
13258 CASE_FLT_FN (BUILT_IN_ATAN):
13259 CASE_FLT_FN (BUILT_IN_ATANH):
13260 CASE_FLT_FN (BUILT_IN_CBRT):
13261 CASE_FLT_FN (BUILT_IN_CEIL):
13262 CASE_FLT_FN (BUILT_IN_ERF):
13263 CASE_FLT_FN (BUILT_IN_EXPM1):
13264 CASE_FLT_FN (BUILT_IN_FLOOR):
13265 CASE_FLT_FN (BUILT_IN_FMOD):
13266 CASE_FLT_FN (BUILT_IN_FREXP):
13267 CASE_FLT_FN (BUILT_IN_LCEIL):
13268 CASE_FLT_FN (BUILT_IN_LDEXP):
13269 CASE_FLT_FN (BUILT_IN_LFLOOR):
13270 CASE_FLT_FN (BUILT_IN_LLCEIL):
13271 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13272 CASE_FLT_FN (BUILT_IN_LLRINT):
13273 CASE_FLT_FN (BUILT_IN_LLROUND):
13274 CASE_FLT_FN (BUILT_IN_LRINT):
13275 CASE_FLT_FN (BUILT_IN_LROUND):
13276 CASE_FLT_FN (BUILT_IN_MODF):
13277 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13278 CASE_FLT_FN (BUILT_IN_RINT):
13279 CASE_FLT_FN (BUILT_IN_ROUND):
13280 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13281 CASE_FLT_FN (BUILT_IN_SINH):
13282 CASE_FLT_FN (BUILT_IN_TANH):
13283 CASE_FLT_FN (BUILT_IN_TRUNC):
13284 /* True if the 1st argument is nonnegative. */
13285 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
13286 strict_overflow_p);
13288 CASE_FLT_FN (BUILT_IN_FMAX):
13289 /* True if the 1st OR 2nd arguments are nonnegative. */
13290 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
13291 strict_overflow_p)
13292 || (tree_expr_nonnegative_warnv_p
13293 (TREE_VALUE (TREE_CHAIN (arglist)),
13294 strict_overflow_p)));
13296 CASE_FLT_FN (BUILT_IN_FMIN):
13297 /* True if the 1st AND 2nd arguments are nonnegative. */
13298 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
13299 strict_overflow_p)
13300 && (tree_expr_nonnegative_warnv_p
13301 (TREE_VALUE (TREE_CHAIN (arglist)),
13302 strict_overflow_p)));
13304 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13305 /* True if the 2nd argument is nonnegative. */
13306 return (tree_expr_nonnegative_warnv_p
13307 (TREE_VALUE (TREE_CHAIN (arglist)),
13308 strict_overflow_p));
13310 CASE_FLT_FN (BUILT_IN_POWI):
13311 /* True if the 1st argument is nonnegative or the second
13312 argument is an even integer. */
13313 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
13315 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
13316 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13317 return true;
13319 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
13320 strict_overflow_p);
13322 CASE_FLT_FN (BUILT_IN_POW):
13323 /* True if the 1st argument is nonnegative or the second
13324 argument is an even integer valued real. */
13325 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
13327 REAL_VALUE_TYPE c;
13328 HOST_WIDE_INT n;
13330 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
13331 n = real_to_integer (&c);
13332 if ((n & 1) == 0)
13334 REAL_VALUE_TYPE cint;
13335 real_from_integer (&cint, VOIDmode, n,
13336 n < 0 ? -1 : 0, 0);
13337 if (real_identical (&c, &cint))
13338 return true;
13341 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
13342 strict_overflow_p);
13344 default:
13345 break;
13349 /* ... fall through ... */
13351 default:
13352 if (truth_value_p (TREE_CODE (t)))
13353 /* Truth values evaluate to 0 or 1, which is nonnegative. */
13354 return true;
13357   /* We don't know the sign of `t', so be conservative and return false.  */
13358 return false;
13361 /* Return true if `t' is known to be non-negative. Handle warnings
13362 about undefined signed overflow. */
13364 bool
13365 tree_expr_nonnegative_p (tree t)
13367 bool ret, strict_overflow_p;
13369 strict_overflow_p = false;
13370 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13371 if (strict_overflow_p)
13372 fold_overflow_warning (("assuming signed overflow does not occur when "
13373 "determining that expression is always "
13374 "non-negative"),
13375 WARN_STRICT_OVERFLOW_MISC);
13376 return ret;
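/* For instance (illustrative): with "unsigned char x, y", the sum
   (int) x + (int) y is recognized as non-negative by the PLUS_EXPR
   zero-extension rule above, since both inner types are unsigned and
   more than one bit narrower than int; for a plain signed "int x" the
   sign is unknown and the result is a conservative false.  */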
13379 /* Return true when T is an address and is known to be nonzero.
13380 For floating point we further ensure that T is not denormal.
13381    Similar logic is present in nonzero_address in rtlanal.c.
13383 If the return value is based on the assumption that signed overflow
13384 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13385 change *STRICT_OVERFLOW_P. */
13387 bool
13388 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13390 tree type = TREE_TYPE (t);
13391 bool sub_strict_overflow_p;
13393 /* Doing something useful for floating point would need more work. */
13394 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13395 return false;
13397 switch (TREE_CODE (t))
13399 case SSA_NAME:
13400 /* Query VRP to see if it has recorded any information about
13401 the range of this object. */
13402 return ssa_name_nonzero_p (t);
13404 case ABS_EXPR:
13405 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13406 strict_overflow_p);
13408 case INTEGER_CST:
13409 return !integer_zerop (t);
13411 case PLUS_EXPR:
13412 if (TYPE_OVERFLOW_UNDEFINED (type))
13414         /* In the presence of negative values it is hard
13415            to say anything definite.  */
13416 sub_strict_overflow_p = false;
13417 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13418 &sub_strict_overflow_p)
13419 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13420 &sub_strict_overflow_p))
13421 return false;
13422         /* One of the operands must be positive and the other non-negative.  */
13423 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13424 overflows, on a twos-complement machine the sum of two
13425 nonnegative numbers can never be zero. */
13426 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13427 strict_overflow_p)
13428 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13429 strict_overflow_p));
13431 break;
13433 case MULT_EXPR:
13434 if (TYPE_OVERFLOW_UNDEFINED (type))
13436 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13437 strict_overflow_p)
13438 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13439 strict_overflow_p))
13441 *strict_overflow_p = true;
13442 return true;
13445 break;
13447 case NOP_EXPR:
13449 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13450 tree outer_type = TREE_TYPE (t);
13452 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13453 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13454 strict_overflow_p));
13456 break;
13458 case ADDR_EXPR:
13460 tree base = get_base_address (TREE_OPERAND (t, 0));
13462 if (!base)
13463 return false;
13465 /* Weak declarations may link to NULL. */
13466 if (VAR_OR_FUNCTION_DECL_P (base))
13467 return !DECL_WEAK (base);
13469 /* Constants are never weak. */
13470 if (CONSTANT_CLASS_P (base))
13471 return true;
13473 return false;
13476 case COND_EXPR:
13477 sub_strict_overflow_p = false;
13478 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13479 &sub_strict_overflow_p)
13480 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13481 &sub_strict_overflow_p))
13483 if (sub_strict_overflow_p)
13484 *strict_overflow_p = true;
13485 return true;
13487 break;
13489 case MIN_EXPR:
13490 sub_strict_overflow_p = false;
13491 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13492 &sub_strict_overflow_p)
13493 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13494 &sub_strict_overflow_p))
13496 if (sub_strict_overflow_p)
13497 *strict_overflow_p = true;
13499 break;
13501 case MAX_EXPR:
13502 sub_strict_overflow_p = false;
13503 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13504 &sub_strict_overflow_p))
13506 if (sub_strict_overflow_p)
13507 *strict_overflow_p = true;
13509 /* When both operands are nonzero, then MAX must be too. */
13510 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13511 strict_overflow_p))
13512 return true;
13514 /* MAX where operand 0 is positive is positive. */
13515 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13516 strict_overflow_p);
13518 /* MAX where operand 1 is positive is positive. */
13519 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13520 &sub_strict_overflow_p)
13521 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13522 &sub_strict_overflow_p))
13524 if (sub_strict_overflow_p)
13525 *strict_overflow_p = true;
13526 return true;
13528 break;
13530 case COMPOUND_EXPR:
13531 case MODIFY_EXPR:
13532 case GIMPLE_MODIFY_STMT:
13533 case BIND_EXPR:
13534 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13535 strict_overflow_p);
13537 case SAVE_EXPR:
13538 case NON_LVALUE_EXPR:
13539 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13540 strict_overflow_p);
13542 case BIT_IOR_EXPR:
13543 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13544 strict_overflow_p)
13545 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13546 strict_overflow_p));
13548 case CALL_EXPR:
13549 return alloca_call_p (t);
13551 default:
13552 break;
13554 return false;
13557 /* Return true when T is an address and is known to be nonzero.
13558 Handle warnings about undefined signed overflow. */
13560 bool
13561 tree_expr_nonzero_p (tree t)
13563 bool ret, strict_overflow_p;
13565 strict_overflow_p = false;
13566 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13567 if (strict_overflow_p)
13568 fold_overflow_warning (("assuming signed overflow does not occur when "
13569 "determining that expression is always "
13570 "non-zero"),
13571 WARN_STRICT_OVERFLOW_MISC);
13572 return ret;
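/* For instance (illustrative): &v is nonzero when V is not a weak
   declaration (ADDR_EXPR case), n | 1 is nonzero (BIT_IOR_EXPR case),
   and i * j with both factors known nonzero is accepted only by
   assuming signed overflow is undefined, which is why the MULT_EXPR
   case sets *STRICT_OVERFLOW_P.  */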
13575 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13576 attempt to fold the expression to a constant without modifying TYPE,
13577 OP0 or OP1.
13579 If the expression could be simplified to a constant, then return
13580 the constant. If the expression would not be simplified to a
13581 constant, then return NULL_TREE. */
13583 tree
13584 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13586 tree tem = fold_binary (code, type, op0, op1);
13587 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13590 /* Given the components of a unary expression CODE, TYPE and OP0,
13591 attempt to fold the expression to a constant without modifying
13592 TYPE or OP0.
13594 If the expression could be simplified to a constant, then return
13595 the constant. If the expression would not be simplified to a
13596 constant, then return NULL_TREE. */
13598 tree
13599 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13601 tree tem = fold_unary (code, type, op0);
13602 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
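/* Illustrative use (a sketch; the variable name is hypothetical):

     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                          build_int_cst (integer_type_node, 2),
                                          build_int_cst (integer_type_node, 3));

   FIVE is an INTEGER_CST of value 5; had either operand been a
   VAR_DECL, the result would not be TREE_CONSTANT and NULL_TREE would
   be returned instead.  */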
13605 /* If EXP represents referencing an element in a constant string
13606 (either via pointer arithmetic or array indexing), return the
13607 tree representing the value accessed, otherwise return NULL. */
13609 tree
13610 fold_read_from_constant_string (tree exp)
13612 if ((TREE_CODE (exp) == INDIRECT_REF
13613 || TREE_CODE (exp) == ARRAY_REF)
13614 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13616 tree exp1 = TREE_OPERAND (exp, 0);
13617 tree index;
13618 tree string;
13620 if (TREE_CODE (exp) == INDIRECT_REF)
13621 string = string_constant (exp1, &index);
13622 else
13624 tree low_bound = array_ref_low_bound (exp);
13625 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13627       /* Optimize the special case of a zero lower bound.
13629          We convert the low_bound to sizetype to avoid some problems
13630          with constant folding.  (E.g. suppose the lower bound is 1,
13631          and its mode is QI.  Without the conversion, (ARRAY
13632          +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13633          +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13634 if (! integer_zerop (low_bound))
13635 index = size_diffop (index, fold_convert (sizetype, low_bound));
13637 string = exp1;
13640 if (string
13641 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13642 && TREE_CODE (string) == STRING_CST
13643 && TREE_CODE (index) == INTEGER_CST
13644 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13645 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13646 == MODE_INT)
13647 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13648 return fold_convert (TREE_TYPE (exp),
13649 build_int_cst (NULL_TREE,
13650 (TREE_STRING_POINTER (string)
13651 [TREE_INT_CST_LOW (index)])));
13653 return NULL;
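/* For example (illustrative): the GENERIC form of "abc"[1] folds to
   an INTEGER_CST holding 'b'.  A non-constant index, an index beyond
   TREE_STRING_LENGTH, or an element mode wider than one byte each
   make the function return NULL.  */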
13656 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13657 an integer constant or real constant.
13659 TYPE is the type of the result. */
13661 static tree
13662 fold_negate_const (tree arg0, tree type)
13664 tree t = NULL_TREE;
13666 switch (TREE_CODE (arg0))
13668 case INTEGER_CST:
13670 unsigned HOST_WIDE_INT low;
13671 HOST_WIDE_INT high;
13672 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13673 TREE_INT_CST_HIGH (arg0),
13674 &low, &high);
13675 t = force_fit_type_double (type, low, high, 1,
13676 (overflow | TREE_OVERFLOW (arg0))
13677 && !TYPE_UNSIGNED (type));
13678 break;
13681 case REAL_CST:
13682 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13683 break;
13685 default:
13686 gcc_unreachable ();
13689 return t;
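/* Example (illustrative): negating INT_MIN in its signed type wraps
   back to INT_MIN, so force_fit_type_double is told about the
   overflow and sets TREE_OVERFLOW; negating a REAL_CST simply flips
   the sign, so -(-0.0) is +0.0.  */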
13692 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13693 an integer constant or real constant.
13695 TYPE is the type of the result. */
13697 tree
13698 fold_abs_const (tree arg0, tree type)
13700 tree t = NULL_TREE;
13702 switch (TREE_CODE (arg0))
13704 case INTEGER_CST:
13705 /* If the value is unsigned, then the absolute value is
13706 the same as the ordinary value. */
13707 if (TYPE_UNSIGNED (type))
13708 t = arg0;
13709 /* Similarly, if the value is non-negative. */
13710 else if (INT_CST_LT (integer_minus_one_node, arg0))
13711 t = arg0;
13712 /* If the value is negative, then the absolute value is
13713 its negation. */
13714 else
13716 unsigned HOST_WIDE_INT low;
13717 HOST_WIDE_INT high;
13718 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13719 TREE_INT_CST_HIGH (arg0),
13720 &low, &high);
13721 t = force_fit_type_double (type, low, high, -1,
13722 overflow | TREE_OVERFLOW (arg0));
13724 break;
13726 case REAL_CST:
13727 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13728 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13729 else
13730 t = arg0;
13731 break;
13733 default:
13734 gcc_unreachable ();
13737 return t;
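/* Similarly (illustrative): fold_abs_const of INT_MIN overflows back
   to INT_MIN with TREE_OVERFLOW set, the same corner case that keeps
   tree_expr_nonnegative_warnv_p above from trusting ABS_EXPR when
   signed overflow wraps.  */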
13740 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13741 constant. TYPE is the type of the result. */
13743 static tree
13744 fold_not_const (tree arg0, tree type)
13746 tree t = NULL_TREE;
13748 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13750 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13751 ~TREE_INT_CST_HIGH (arg0), 0,
13752 TREE_OVERFLOW (arg0));
13754 return t;
13757 /* Given CODE, a relational operator, the target type, TYPE and two
13758 constant operands OP0 and OP1, return the result of the
13759 relational operation. If the result is not a compile time
13760 constant, then return NULL_TREE. */
13762 static tree
13763 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13765 int result, invert;
13767 /* From here on, the only cases we handle are when the result is
13768 known to be a constant. */
13770 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13772 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13773 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13775 /* Handle the cases where either operand is a NaN. */
13776 if (real_isnan (c0) || real_isnan (c1))
13778 switch (code)
13780 case EQ_EXPR:
13781 case ORDERED_EXPR:
13782 result = 0;
13783 break;
13785 case NE_EXPR:
13786 case UNORDERED_EXPR:
13787 case UNLT_EXPR:
13788 case UNLE_EXPR:
13789 case UNGT_EXPR:
13790 case UNGE_EXPR:
13791 case UNEQ_EXPR:
13792 result = 1;
13793 break;
13795 case LT_EXPR:
13796 case LE_EXPR:
13797 case GT_EXPR:
13798 case GE_EXPR:
13799 case LTGT_EXPR:
13800 if (flag_trapping_math)
13801 return NULL_TREE;
13802 result = 0;
13803 break;
13805 default:
13806 gcc_unreachable ();
13809 return constant_boolean_node (result, type);
13812 return constant_boolean_node (real_compare (code, c0, c1), type);
13815 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13817 To compute GT, swap the arguments and do LT.
13818 To compute GE, do LT and invert the result.
13819 To compute LE, swap the arguments, do LT and invert the result.
13820 To compute NE, do EQ and invert the result.
13822 Therefore, the code below must handle only EQ and LT. */
13824 if (code == LE_EXPR || code == GT_EXPR)
13826 tree tem = op0;
13827 op0 = op1;
13828 op1 = tem;
13829 code = swap_tree_comparison (code);
13832 /* Note that it is safe to invert for real values here because we
13833      have already handled the one case where it matters.  */
13835 invert = 0;
13836 if (code == NE_EXPR || code == GE_EXPR)
13838 invert = 1;
13839 code = invert_tree_comparison (code, false);
13842 /* Compute a result for LT or EQ if args permit;
13843      otherwise return NULL_TREE.  */
13844 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13846 if (code == EQ_EXPR)
13847 result = tree_int_cst_equal (op0, op1);
13848 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13849 result = INT_CST_LT_UNSIGNED (op0, op1);
13850 else
13851 result = INT_CST_LT (op0, op1);
13853 else
13854 return NULL_TREE;
13856 if (invert)
13857 result ^= 1;
13858 return constant_boolean_node (result, type);
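/* Worked example (illustrative): to fold 3 > 2, the code swaps the
   operands and the comparison, computes 2 < 3 with INT_CST_LT, and
   returns a true boolean node; for 3 >= 2 it computes 3 < 2, getting
   0, and inverts that to 1.  */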
13861 /* Build an expression for a cleanup point containing EXPR with type TYPE.
13862    Don't build a cleanup point expression for EXPR if it doesn't have side
13863    effects.  */
13865 tree
13866 fold_build_cleanup_point_expr (tree type, tree expr)
13868 /* If the expression does not have side effects then we don't have to wrap
13869 it with a cleanup point expression. */
13870 if (!TREE_SIDE_EFFECTS (expr))
13871 return expr;
13873   /* If the expression is a return, check whether the expression inside the
13874      return, or the right hand side of the modify expression inside the
13875      return, has side effects.  If either has none, we don't need to
13876      wrap the expression in a cleanup point expression.  Note we don't check the
13877      left hand side of the modify because it should always be a return decl.  */
13878 if (TREE_CODE (expr) == RETURN_EXPR)
13880 tree op = TREE_OPERAND (expr, 0);
13881 if (!op || !TREE_SIDE_EFFECTS (op))
13882 return expr;
13883 op = TREE_OPERAND (op, 1);
13884 if (!TREE_SIDE_EFFECTS (op))
13885 return expr;
13888 return build1 (CLEANUP_POINT_EXPR, type, expr);
13891 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13892 avoid confusing the gimplify process. */
13894 tree
13895 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13897 /* The size of the object is not relevant when talking about its address. */
13898 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13899 t = TREE_OPERAND (t, 0);
13901 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13902 if (TREE_CODE (t) == INDIRECT_REF
13903 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13905 t = TREE_OPERAND (t, 0);
13906 if (TREE_TYPE (t) != ptrtype)
13907 t = build1 (NOP_EXPR, ptrtype, t);
13909 else
13911 tree base = t;
13913 while (handled_component_p (base))
13914 base = TREE_OPERAND (base, 0);
13915 if (DECL_P (base))
13916 TREE_ADDRESSABLE (base) = 1;
13918 t = build1 (ADDR_EXPR, ptrtype, t);
13921 return t;
13924 tree
13925 build_fold_addr_expr (tree t)
13927 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
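/* Sketch (illustrative): for "int x", build_fold_addr_expr (x) yields
   an ADDR_EXPR of type int *, marking X TREE_ADDRESSABLE; applied to
   *p it strips the INDIRECT_REF and returns P directly, adding a
   NOP_EXPR only if the pointer types differ.  */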
13930 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13931 of an indirection through OP0, or NULL_TREE if no simplification is
13932 possible. */
13934 tree
13935 fold_indirect_ref_1 (tree type, tree op0)
13937 tree sub = op0;
13938 tree subtype;
13940 STRIP_NOPS (sub);
13941 subtype = TREE_TYPE (sub);
13942 if (!POINTER_TYPE_P (subtype))
13943 return NULL_TREE;
13945 if (TREE_CODE (sub) == ADDR_EXPR)
13947 tree op = TREE_OPERAND (sub, 0);
13948 tree optype = TREE_TYPE (op);
13949 /* *&CONST_DECL -> to the value of the const decl. */
13950 if (TREE_CODE (op) == CONST_DECL)
13951 return DECL_INITIAL (op);
13952 /* *&p => p; make sure to handle *&"str"[cst] here. */
13953 if (type == optype)
13955 tree fop = fold_read_from_constant_string (op);
13956 if (fop)
13957 return fop;
13958 else
13959 return op;
13961 /* *(foo *)&fooarray => fooarray[0] */
13962 else if (TREE_CODE (optype) == ARRAY_TYPE
13963 && type == TREE_TYPE (optype))
13965 tree type_domain = TYPE_DOMAIN (optype);
13966 tree min_val = size_zero_node;
13967 if (type_domain && TYPE_MIN_VALUE (type_domain))
13968 min_val = TYPE_MIN_VALUE (type_domain);
13969 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13971 /* *(foo *)&complexfoo => __real__ complexfoo */
13972 else if (TREE_CODE (optype) == COMPLEX_TYPE
13973 && type == TREE_TYPE (optype))
13974 return fold_build1 (REALPART_EXPR, type, op);
13975 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13976 else if (TREE_CODE (optype) == VECTOR_TYPE
13977 && type == TREE_TYPE (optype))
13979 tree part_width = TYPE_SIZE (type);
13980 tree index = bitsize_int (0);
13981 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13985 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13986 if (TREE_CODE (sub) == PLUS_EXPR
13987 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13989 tree op00 = TREE_OPERAND (sub, 0);
13990 tree op01 = TREE_OPERAND (sub, 1);
13991 tree op00type;
13993 STRIP_NOPS (op00);
13994 op00type = TREE_TYPE (op00);
13995 if (TREE_CODE (op00) == ADDR_EXPR
13996 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13997 && type == TREE_TYPE (TREE_TYPE (op00type)))
13999 tree size = TYPE_SIZE_UNIT (type);
14000 if (tree_int_cst_equal (size, op01))
14001 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14005 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14006 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14007 && type == TREE_TYPE (TREE_TYPE (subtype)))
14009 tree type_domain;
14010 tree min_val = size_zero_node;
14011 sub = build_fold_indirect_ref (sub);
14012 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14013 if (type_domain && TYPE_MIN_VALUE (type_domain))
14014 min_val = TYPE_MIN_VALUE (type_domain);
14015 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14018 return NULL_TREE;
14021 /* Builds an expression for an indirection through T, simplifying some
14022 cases. */
14024 tree
14025 build_fold_indirect_ref (tree t)
14027 tree type = TREE_TYPE (TREE_TYPE (t));
14028 tree sub = fold_indirect_ref_1 (type, t);
14030 if (sub)
14031 return sub;
14032 else
14033 return build1 (INDIRECT_REF, type, t);
14036 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14038 tree
14039 fold_indirect_ref (tree t)
14041 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14043 if (sub)
14044 return sub;
14045 else
14046 return t;
14049 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14050 whose result is ignored. The type of the returned tree need not be
14051 the same as the original expression. */
14053 tree
14054 fold_ignored_result (tree t)
14056 if (!TREE_SIDE_EFFECTS (t))
14057 return integer_zero_node;
14059 for (;;)
14060 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14062 case tcc_unary:
14063 t = TREE_OPERAND (t, 0);
14064 break;
14066 case tcc_binary:
14067 case tcc_comparison:
14068 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14069 t = TREE_OPERAND (t, 0);
14070 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14071 t = TREE_OPERAND (t, 1);
14072 else
14073 return t;
14074 break;
14076 case tcc_expression:
14077 switch (TREE_CODE (t))
14079 case COMPOUND_EXPR:
14080 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14081 return t;
14082 t = TREE_OPERAND (t, 0);
14083 break;
14085 case COND_EXPR:
14086 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14087 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14088 return t;
14089 t = TREE_OPERAND (t, 0);
14090 break;
14092 default:
14093 return t;
14095 break;
14097 default:
14098 return t;
14102 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14103 This can only be applied to objects of a sizetype. */
14105 tree
14106 round_up (tree value, int divisor)
14108 tree div = NULL_TREE;
14110 gcc_assert (divisor > 0);
14111 if (divisor == 1)
14112 return value;
14114 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14115 have to do anything. Only do this when we are not given a const,
14116 because in that case, this check is more expensive than just
14117 doing it. */
14118 if (TREE_CODE (value) != INTEGER_CST)
14120 div = build_int_cst (TREE_TYPE (value), divisor);
14122 if (multiple_of_p (TREE_TYPE (value), value, div))
14123 return value;
14126 /* If divisor is a power of two, simplify this to bit manipulation. */
14127 if (divisor == (divisor & -divisor))
14129 if (TREE_CODE (value) == INTEGER_CST)
14131 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14132 unsigned HOST_WIDE_INT high;
14133 bool overflow_p;
14135 if ((low & (divisor - 1)) == 0)
14136 return value;
14138 overflow_p = TREE_OVERFLOW (value);
14139 high = TREE_INT_CST_HIGH (value);
14140 low &= ~(divisor - 1);
14141 low += divisor;
14142 if (low == 0)
14144 high++;
14145 if (high == 0)
14146 overflow_p = true;
14149 return force_fit_type_double (TREE_TYPE (value), low, high,
14150 -1, overflow_p);
14152 else
14154 tree t;
14156 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14157 value = size_binop (PLUS_EXPR, value, t);
14158 t = build_int_cst (TREE_TYPE (value), -divisor);
14159 value = size_binop (BIT_AND_EXPR, value, t);
14162 else
14164 if (!div)
14165 div = build_int_cst (TREE_TYPE (value), divisor);
14166 value = size_binop (CEIL_DIV_EXPR, value, div);
14167 value = size_binop (MULT_EXPR, value, div);
14170 return value;
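/* Examples (illustrative): round_up (37, 8) takes the power-of-two
   path, computing (37 + 7) & -8 = 40; round_up (37, 6) falls back to
   CEIL_DIV_EXPR followed by MULT_EXPR, giving 7 * 6 = 42.  */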
14173 /* Likewise, but round down. */
14175 tree
14176 round_down (tree value, int divisor)
14178 tree div = NULL_TREE;
14180 gcc_assert (divisor > 0);
14181 if (divisor == 1)
14182 return value;
14184 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14185 have to do anything. Only do this when we are not given a const,
14186 because in that case, this check is more expensive than just
14187 doing it. */
14188 if (TREE_CODE (value) != INTEGER_CST)
14190 div = build_int_cst (TREE_TYPE (value), divisor);
14192 if (multiple_of_p (TREE_TYPE (value), value, div))
14193 return value;
14196 /* If divisor is a power of two, simplify this to bit manipulation. */
14197 if (divisor == (divisor & -divisor))
14199 tree t;
14201 t = build_int_cst (TREE_TYPE (value), -divisor);
14202 value = size_binop (BIT_AND_EXPR, value, t);
14204 else
14206 if (!div)
14207 div = build_int_cst (TREE_TYPE (value), divisor);
14208 value = size_binop (FLOOR_DIV_EXPR, value, div);
14209 value = size_binop (MULT_EXPR, value, div);
14212 return value;
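/* Likewise (illustrative): round_down (37, 8) is 37 & -8 = 32, while
   round_down (37, 6) uses FLOOR_DIV_EXPR then MULT_EXPR, giving
   6 * 6 = 36.  */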
/* Return a pointer to the base of the object addressed by EXP and
   extract information about the offset of the access, storing it in
   *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
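
/* Illustrative sketch (not part of the original file): for an
   ADDR_EXPR such as &s.b the core is &s and the access offset comes
   back through *PBITPOS and *POFFSET; for any other expression the
   core is the expression itself with a zero offset.  */
#if 0
  HOST_WIDE_INT bitpos;
  tree offset;
  /* ADDR is assumed to be an ADDR_EXPR like &s.b.  */
  tree core = split_address_to_core_and_offset (addr, &bitpos, &offset);
#endif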
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
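
/* Usage sketch (illustrative only): given the addresses of two fields
   of the same object, the difference folds to a byte-offset constant.  */
#if 0
  HOST_WIDE_INT diff;
  /* E1 and E2 are assumed to be ADDR_EXPRs such as &s.b and &s.a.  */
  if (ptr_difference_const (e1, e2, &diff))
    use_constant_difference (diff);	/* hypothetical consumer  */
#endif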
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip the copysign function call and return its first
	       argument.  */
	    arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
	    arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
		if (arg0)
		  return build_function_call_expr (get_callee_fndecl (exp),
						   build_tree_list (NULL_TREE,
								    arg0));
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
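
/* Usage sketch (illustrative, not part of the original file): a caller
   that only needs the magnitude of EXP can strip sign operations, e.g.
   cos (-x * y) can be folded to cos (x * y).  */
#if 0
  /* EXP is assumed to be the tree for -x * y.  */
  tree stripped = fold_strip_sign_ops (exp);
  /* If non-NULL, STRIPPED is the tree for x * y.  */
#endif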