tweak changelog
[official-gcc/constexpr.git] / gcc / fold-const.c
blob292b89f94961274532569b11c99ab4093131f246
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "target.h"
62 #include "toplev.h"
63 #include "intl.h"
64 #include "ggc.h"
65 #include "hashtab.h"
66 #include "langhooks.h"
67 #include "md5.h"
68 #include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   Bit 0 means "less than", bit 1 "equal", bit 2 "greater than" and
   bit 3 "unordered"; compound codes are the union of their parts,
   e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ and
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
/* Forward declarations of the file-local helpers defined below.
   (make_range and merge_ranges are declared extern because they are
   also used from outside this file.)  */
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
			  tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
173 /* Unpack a two-word integer into 4 words.
174 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
175 WORDS points to the array of HOST_WIDE_INTs. */
177 static void
178 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
180 words[0] = LOWPART (low);
181 words[1] = HIGHPART (low);
182 words[2] = LOWPART (hi);
183 words[3] = HIGHPART (hi);
186 /* Pack an array of 4 words into a two-word integer.
187 WORDS points to the array of words.
188 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
190 static void
191 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
192 HOST_WIDE_INT *hi)
194 *low = words[0] + words[1] * BASE;
195 *hi = words[2] + words[3] * BASE;
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  /* Remember the originals so we can report whether truncation or
     extension changed the value.  */
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec = TYPE_PRECISION (type);
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      /* The value occupies exactly the low word; replicate its sign
	 bit through the high word.  */
      if ((HOST_WIDE_INT) l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT) (-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* Truncate/extend in place; OVERFLOW records whether the value
     changed in the process.  */
  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  /* Unshared so the TREE_OVERFLOW flag does not leak onto a
	     cached constant.  */
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  /* (l < l1) is the carry out of the low-word addition; do the high
     word addition in unsigned arithmetic to avoid signed overflow.  */
  h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
		       + (unsigned HOST_WIDE_INT) h2
		       + (l < l1));

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
	    || (h == h1
		&& l < l1));
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      /* Overflow iff the input was the most negative value, in which
	 case the sign bit survives negation.  */
      return (*hv & h1) < 0;
    }
  else
    {
      /* Nonzero low word: the borrow turns the high word into a plain
	 one's complement, and overflow cannot occur.  */
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  /* Schoolbook multiply on half-word digits; carries fit because each
     digit has only HOST_BITS_PER_WIDE_INT / 2 significant bits.  */
  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The two-step shift of L1 avoids an undefined shift by the full
	 word width when COUNT == 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* All ones when arithmetic-shifting a negative value, else zero.  */
  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* The two-step shift of H1 avoids an undefined shift by the full
	 word width when COUNT == 0.  */
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
559 void
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
567 count %= prec;
568 if (count < 0)
569 count += prec;
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
573 *lv = s1l | s2l;
574 *hv = s1h | s2h;
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
581 void
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
589 count %= prec;
590 if (count < 0)
591 count += prec;
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
595 *lv = s1l | s2l;
596 *hv = s1h | s2h;
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  /* Division by zero: flag overflow and divide by 1 instead so the
     rest of the routine produces a defined result.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den <= ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1; */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  /* The quotient takes the type of the dividend.  */
  return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
943 /* Stop deferring overflow warnings. If there is a pending warning,
944 and ISSUE is true, then issue the warning if appropriate. STMT is
945 the statement with which the warning should be associated (used for
946 location information); STMT may be NULL. CODE is the level of the
947 warning--a warn_strict_overflow_code value. This function will use
948 the smaller of CODE and the deferred code when deciding whether to
949 issue the warning. CODE may be zero to mean to always use the
950 deferred code. */
952 void
953 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
955 const char *warnmsg;
956 location_t locus;
958 gcc_assert (fold_deferring_overflow_warnings > 0);
959 --fold_deferring_overflow_warnings;
960 if (fold_deferring_overflow_warnings > 0)
962 if (fold_deferred_overflow_warning != NULL
963 && code != 0
964 && code < (int) fold_deferred_overflow_code)
965 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
966 return;
969 warnmsg = fold_deferred_overflow_warning;
970 fold_deferred_overflow_warning = NULL;
972 if (!issue || warnmsg == NULL)
973 return;
975 if (gimple_no_warning_p (stmt))
976 return;
978 /* Use the smallest code level when deciding to issue the
979 warning. */
980 if (code == 0 || code > (int) fold_deferred_overflow_code)
981 code = fold_deferred_overflow_code;
983 if (!issue_strict_overflow_warning (code))
984 return;
986 if (stmt == NULL)
987 locus = input_location;
988 else
989 locus = gimple_location (stmt);
990 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  GMSGID is the warning message; note that it
   is stored (not copied) in the deferred-warning pointer, so it must
   outlive the deferral.  WC is the warning's severity level.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      /* Defer: remember only the most severe (lowest-code) warning.  */
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    /* The rounding functions below are odd only when the current
       rounding mode cannot affect their result.  */
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
1074 /* Check whether we may negate an integer constant T without causing
1075 overflow. */
1077 bool
1078 may_negate_without_overflow_p (const_tree t)
1080 unsigned HOST_WIDE_INT val;
1081 unsigned int prec;
1082 tree type;
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1088 return false;
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1093 if (TREE_INT_CST_LOW (t) != 0)
1094 return true;
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1098 else
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.
   This predicate must stay in sync with fold_negate_expr: whenever it
   returns true, fold_negate_expr must produce a non-NULL result.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* -(~A) folds to A + 1, which is only safe with wrapping
	 integer overflow.  */
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Negating either factor/operand of * or / negates the result.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x) for odd built-in math functions.  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31, i.e. only when
	 the shift count equals precision - 1 (a sign/zero extraction).  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  LOC is the location to use for any newly built trees.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Accept the negated constant unless the negation introduced a
	 new overflow and the type traps on overflow.  */
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	/* Only rebuild the complex constant if both parts folded to
	   constants of matching kind.  */
	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      /* - (- A) is just A.  */
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Push the negation into whichever operand can absorb it.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      /* Warn unless the divisor is a constant other than 1 (in
		 which case no overflow assumption is actually needed).  */
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      /* Warn unless the dividend is a constant other than the
		 type's minimum value.  */
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x) for odd built-in math functions.  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31: flipping the
	 signedness of the shift gives the negated 0/-1 (or 0/1) mask.
	 Only valid when the shift count is precision - 1.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1438 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1439 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1440 return NULL_TREE. */
1442 static tree
1443 negate_expr (tree t)
1445 tree type, tem;
1446 location_t loc;
1448 if (t == NULL_TREE)
1449 return NULL_TREE;
1451 loc = EXPR_LOCATION (t);
1452 type = TREE_TYPE (t);
1453 STRIP_SIGN_NOPS (t);
1455 tem = fold_negate_expr (loc, t);
1456 if (!tem)
1458 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1459 SET_EXPR_LOCATION (tem, loc);
1461 return fold_convert_loc (loc, type, tem);
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* neg1_p: op1 appears negated in IN (IN is a MINUS_EXPR).  The
	 neg_*_p flags record which extracted part inherits that sign.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  A negated literal moves to
	 *MINUS_LITP rather than being wrapped in a NEGATE_EXPR.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negate every part; for literals this swaps *LITP/*MINUS_LITP.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
1563 /* Re-associate trees split by the above function. T1 and T2 are
1564 either expressions to associate or null. Return the new
1565 expression, if any. LOC is the location of the new expression. If
1566 we build an operation, do it in TYPE and with CODE. */
1568 static tree
1569 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1571 tree tem;
1573 if (t1 == 0)
1574 return t2;
1575 else if (t2 == 0)
1576 return t1;
1578 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1579 try to fold this since we will have infinite recursion. But do
1580 deal with any NEGATE_EXPRs. */
1581 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1582 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1584 if (code == PLUS_EXPR)
1586 if (TREE_CODE (t1) == NEGATE_EXPR)
1587 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
1588 fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
1589 else if (TREE_CODE (t2) == NEGATE_EXPR)
1590 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
1591 fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
1592 else if (integer_zerop (t2))
1593 return fold_convert_loc (loc, type, t1);
1595 else if (code == MINUS_EXPR)
1597 if (integer_zerop (t2))
1598 return fold_convert_loc (loc, type, t1);
1601 tem = build2 (code, type, fold_convert_loc (loc, type, t1),
1602 fold_convert_loc (loc, type, t2));
1603 goto associate_trees_exit;
1606 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1607 fold_convert_loc (loc, type, t2));
1608 associate_trees_exit:
1609 protected_set_expr_location (tem, loc);
1610 return tem;
1613 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1614 for use in int_const_binop, size_binop and size_diffop. */
1616 static bool
1617 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1619 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1620 return false;
1621 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1622 return false;
1624 switch (code)
1626 case LSHIFT_EXPR:
1627 case RSHIFT_EXPR:
1628 case LROTATE_EXPR:
1629 case RROTATE_EXPR:
1630 return true;
1632 default:
1633 break;
1636 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1637 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1638 && TYPE_MODE (type1) == TYPE_MODE (type2);
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  The arithmetic is done on a
   double-word (low/high) representation via the *_double helpers.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  /* sizetype constants are treated as signed for overflow purposes.  */
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by a negated count.  */
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      /* Likewise, a right rotate is a left rotate by a negated count.  */
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtraction is negation followed by addition.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case: both operands
	 non-negative and single-word.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      /* Division by zero is not foldable.  */
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      /* X / X is 1 (X nonzero).  */
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      /* For modulus, keep the remainder and discard the quotient.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW temporarily holds the result of the comparison ARG1 < ARG2.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.
   Dispatches on the kind of ARG1: integer, real, fixed-point, complex
   or vector constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  /* Shift counts are INTEGER_CSTs; widen into a fixed value.  */
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  /* Floating complex multiply goes through MPC for correct
	     non-finite handling.  */
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2, notrunc),
			       const_binop (MULT_EXPR, i2, i2, notrunc),
			       notrunc);
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2, notrunc),
			       const_binop (MULT_EXPR, i1, i2, notrunc),
			       notrunc);
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2, notrunc),
			       const_binop (MULT_EXPR, r1, i2, notrunc),
			       notrunc);

	      real = const_binop (code, t1, magsquared, notrunc);
	      imag = const_binop (code, t2, magsquared, notrunc);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2, notrunc);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio,
						       notrunc),
					  notrunc);
		  real = const_binop (MULT_EXPR, r1, ratio, notrunc);
		  real = const_binop (PLUS_EXPR, real, i1, notrunc);
		  real = const_binop (code, real, div, notrunc);

		  imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
		  imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
		  imag = const_binop (code, imag, div, notrunc);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2, notrunc);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio,
						       notrunc),
					  notrunc);

		  real = const_binop (MULT_EXPR, i1, ratio, notrunc);
		  real = const_binop (PLUS_EXPR, real, r1, notrunc);
		  real = const_binop (code, real, div, notrunc);

		  imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
		  imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
		  imag = const_binop (code, imag, div, notrunc);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      /* Both parts must have folded for the complex result to fold.  */
      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE(arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if(TREE_CODE(arg2) != VECTOR_CST)
	return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      /* Fold the operation element-wise over both constant vectors.  */
      for (i = 0; i < count; i++)
	{
	  tree elem1, elem2, elem;

	  /* The trailing elements can be empty and should be treated as 0 */
	  if(!elements1)
	    elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
	  else
	    {
	      elem1 = TREE_VALUE(elements1);
	      elements1 = TREE_CHAIN (elements1);
	    }

	  if(!elements2)
	    elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
	  else
	    {
	      elem2 = TREE_VALUE(elements2);
	      elements2 = TREE_CHAIN (elements2);
	    }

	  elem = const_binop (code, elem1, elem2, notrunc);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE */
	  if(elem == NULL_TREE)
	    return NULL_TREE;

	  /* Elements are consed in reverse; nreverse fixes the order below.  */
	  list = tree_cons (NULL_TREE, elem, list);
	}
      return build_vector(type, nreverse(list));
    }
  return NULL_TREE;
}
2140 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2141 indicates which particular sizetype to create. */
2143 tree
2144 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2146 return build_int_cst (sizetype_tab[(int) kind], number);
2149 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2150 is a tree code. The type of the result is taken from the operands.
2151 Both must be equivalent integer types, ala int_binop_types_match_p.
2152 If the operands are constant, so is the result. */
2154 tree
2155 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2157 tree type = TREE_TYPE (arg0);
2159 if (arg0 == error_mark_node || arg1 == error_mark_node)
2160 return error_mark_node;
2162 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2163 TREE_TYPE (arg1)));
2165 /* Handle the special case of two integer constants faster. */
2166 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2168 /* And some specific cases even faster than that. */
2169 if (code == PLUS_EXPR)
2171 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2172 return arg1;
2173 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2174 return arg0;
2176 else if (code == MINUS_EXPR)
2178 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2179 return arg0;
2181 else if (code == MULT_EXPR)
2183 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2184 return arg1;
2187 /* Handle general case of two integer constants. */
2188 return int_const_binop (code, arg0, arg1, 0);
2191 return fold_build2_loc (loc, code, type, arg0, arg1);
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  /* Select the signed counterpart CTYPE in which the result will be
     expressed.  */
  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    /* arg1 > arg0: compute 0 - (arg1 - arg0) in CTYPE; the inner
       unsigned subtraction cannot wrap because arg1 >= arg0.  */
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting from a pointer,  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1))
			     /* or to a sizetype with same signedness
				and the precision is unchanged.
				???  sizetype is always sign-extended,
				but its signedness depends on the
				frontend.  Thus we see spurious overflows
				here if we do not check this.  */
			     && !((TYPE_PRECISION (TREE_TYPE (arg1))
				   == TYPE_PRECISION (type))
				  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
				      == TYPE_UNSIGNED (type))
				  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
				       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
				      || (TREE_CODE (type) == INTEGER_TYPE
					  && TYPE_IS_SIZETYPE (type)))),
			     /* Flag overflow when a negative value is
				converted signed -> unsigned, or when the
				operand was already flagged.  */
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  /* Only truncation is handled; any other conversion code is a bug.  */
  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  /* Below the minimum: saturate to TYPE_MIN_VALUE.  */
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      /* Above the maximum: saturate to TYPE_MAX_VALUE.  */
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  /* In range: do the actual real-to-integer conversion.  */
  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* A negative shift count performs a right shift; sign-extend
	 only for signed fixed-point modes.  */
      lshift_double (temp.low, temp.high,
		     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
		     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp_trunc.low, &temp_trunc.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      /* fbit covers the whole double_int; the integer part is zero.  */
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
			     (temp.high < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    /* Otherwise just propagate the operand's overflow flag.  */
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
2446 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2447 to a floating point type. */
2449 static tree
2450 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2452 REAL_VALUE_TYPE value;
2453 tree t;
2455 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2456 t = build_real (type, value);
2458 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2459 return t;
2462 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2463 to another fixed-point type. */
2465 static tree
2466 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2468 FIXED_VALUE_TYPE value;
2469 tree t;
2470 bool overflow_p;
2472 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2473 TYPE_SATURATING (type));
2474 t = build_fixed (type, value);
2476 /* Propagate overflow flags. */
2477 if (overflow_p | TREE_OVERFLOW (arg1))
2478 TREE_OVERFLOW (t) = 1;
2479 return t;
2482 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2483 to a fixed-point type. */
2485 static tree
2486 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2488 FIXED_VALUE_TYPE value;
2489 tree t;
2490 bool overflow_p;
2492 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2493 TREE_INT_CST (arg1),
2494 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2495 TYPE_SATURATING (type));
2496 t = build_fixed (type, value);
2498 /* Propagate overflow flags. */
2499 if (overflow_p | TREE_OVERFLOW (arg1))
2500 TREE_OVERFLOW (t) = 1;
2501 return t;
2504 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2505 to a fixed-point type. */
2507 static tree
2508 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2510 FIXED_VALUE_TYPE value;
2511 tree t;
2512 bool overflow_p;
2514 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2515 &TREE_REAL_CST (arg1),
2516 TYPE_SATURATING (type));
2517 t = build_fixed (type, value);
2519 /* Propagate overflow flags. */
2520 if (overflow_p | TREE_OVERFLOW (arg1))
2521 TREE_OVERFLOW (t) = 1;
2522 return t;
2525 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2526 type TYPE. If no simplification can be done return NULL_TREE. */
2528 static tree
2529 fold_convert_const (enum tree_code code, tree type, tree arg1)
2531 if (TREE_TYPE (arg1) == type)
2532 return arg1;
2534 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2535 || TREE_CODE (type) == OFFSET_TYPE)
2537 if (TREE_CODE (arg1) == INTEGER_CST)
2538 return fold_convert_const_int_from_int (type, arg1);
2539 else if (TREE_CODE (arg1) == REAL_CST)
2540 return fold_convert_const_int_from_real (code, type, arg1);
2541 else if (TREE_CODE (arg1) == FIXED_CST)
2542 return fold_convert_const_int_from_fixed (type, arg1);
2544 else if (TREE_CODE (type) == REAL_TYPE)
2546 if (TREE_CODE (arg1) == INTEGER_CST)
2547 return build_real_from_int_cst (type, arg1);
2548 else if (TREE_CODE (arg1) == REAL_CST)
2549 return fold_convert_const_real_from_real (type, arg1);
2550 else if (TREE_CODE (arg1) == FIXED_CST)
2551 return fold_convert_const_real_from_fixed (type, arg1);
2553 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2555 if (TREE_CODE (arg1) == FIXED_CST)
2556 return fold_convert_const_fixed_from_fixed (type, arg1);
2557 else if (TREE_CODE (arg1) == INTEGER_CST)
2558 return fold_convert_const_fixed_from_int (type, arg1);
2559 else if (TREE_CODE (arg1) == REAL_CST)
2560 return fold_convert_const_fixed_from_real (type, arg1);
2562 return NULL_TREE;
2565 /* Construct a vector of zero elements of vector type TYPE. */
2567 static tree
2568 build_zero_vector (tree type)
2570 tree elem, list;
2571 int i, units;
2573 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2574 units = TYPE_VECTOR_SUBPARTS (type);
2576 list = NULL_TREE;
2577 for (i = 0; i < units; i++)
2578 list = tree_cons (NULL_TREE, elem, list);
2579 return build_vector (type, list);
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  /* Identical types are trivially convertible.  */
  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  /* Variants of the same main type only differ in qualifiers.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      /* Any integral/pointer/offset source converts to these; a vector
	 source works only when the total size matches.  */
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      /* These families only NOP-convert within the same family.  */
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  /* Qualifier-only differences need just a NOP_EXPR.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  /* Dispatch on the target type.  */
  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* Try to fold the conversion at compile time first.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      /* complex -> scalar keeps only the real part.  */
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      /* Constant-fold first; fall back to building an expression.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  /* complex -> real keeps only the real part.  */
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  /* scalar -> complex: the imaginary part is zero.  */
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_convert_loc (loc, TREE_TYPE (type), arg),
			      fold_convert_loc (loc, TREE_TYPE (type),
					    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    /* If ARG is already a COMPLEX_EXPR we can convert its parts
	       directly without forcing evaluation of ARG.  */
	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* Otherwise wrap ARG in SAVE_EXPR so it is evaluated only
	       once, even though we read both of its parts.  */
	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      /* A MODIFY_EXPR already has void value; keep it but fix up its
	 location via the common exit below.  */
      if (TREE_CODE (tem) == MODIFY_EXPR)
	goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    /* Declarations.  */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    /* Memory references.  */
    case COMPONENT_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    /* Expressions that can yield an lvalue.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}
2848 /* Return an expr equal to X but certainly not valid as an lvalue. */
2850 tree
2851 non_lvalue_loc (location_t loc, tree x)
2853 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2854 us. */
2855 if (in_gimple_form)
2856 return x;
2858 if (! maybe_lvalue_p (x))
2859 return x;
2860 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2861 SET_EXPR_LOCATION (x, loc);
2862 return x;
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Consulted by
   pedantic_non_lvalue_loc below.  */

int pedantic_lvalues;
2870 /* When pedantic, return an expr equal to X but certainly not valid as a
2871 pedantic lvalue. Otherwise, return X. */
2873 static tree
2874 pedantic_non_lvalue_loc (location_t loc, tree x)
2876 if (pedantic_lvalues)
2877 return non_lvalue_loc (loc, x);
2878 protected_set_expr_location (x, loc);
2879 return x;
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  /* With trapping math an inverted comparison may trap on different
     inputs than the original, so refuse to invert.  */
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    /* For the ordered comparisons the true inverse must also hold for
       unordered (NaN) operands, hence the UN* codes when NaNs matter.  */
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    /* Symmetric comparisons are unchanged by swapping the operands.  */
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  The bit encoding lets combine_comparisons
   compute AND/OR of two comparisons with plain bit operations.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      /* Not every bit combination maps back to a tree code
	 (e.g. COMPCODE_TRUE/FALSE are handled by the caller).  */
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  /* Because compcodes are bitmasks of outcomes, the combined predicate
     is just the bitwise AND or OR of the two encodings.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  A comparison traps on NaN operands
	 unless it is an unordered variant, EQ, or ORD.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* Fold degenerate combinations to boolean constants, otherwise map
     the combined compcode back to a tree comparison.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
3138 /* Return nonzero if two operands (typically of the same tree node)
3139 are necessarily equal. If either argument has side-effects this
3140 function returns zero. FLAGS modifies behavior as follows:
3142 If OEP_ONLY_CONST is set, only return nonzero for constants.
3143 This function tests whether the operands are indistinguishable;
3144 it does not test whether they are equal using C's == operation.
3145 The distinction is important for IEEE floating point, because
3146 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3147 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3149 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3150 even though it may hold multiple values during a function.
3151 This is because a GCC tree node guarantees that nothing else is
3152 executed between the evaluation of its "operands" (which may often
3153 be evaluated in arbitrary order). Hence if the operands themselves
3154 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3155 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3156 unset means assuming isochronic (or instantaneous) tree equivalence.
3157 Unless comparing arbitrary expression trees, such as from different
3158 statements, this flag can usually be left unset.
3160 If OEP_PURE_SAME is set, then pure functions with identical arguments
3161 are considered the same. It is used when the caller has other ways
3162 to ensure that global memory is unchanged in between. */
3165 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3167 /* If either is ERROR_MARK, they aren't equal. */
3168 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3169 || TREE_TYPE (arg0) == error_mark_node
3170 || TREE_TYPE (arg1) == error_mark_node)
3171 return 0;
3173 /* Check equality of integer constants before bailing out due to
3174 precision differences. */
3175 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3176 return tree_int_cst_equal (arg0, arg1);
3178 /* If both types don't have the same signedness, then we can't consider
3179 them equal. We must check this before the STRIP_NOPS calls
3180 because they may change the signedness of the arguments. As pointers
3181 strictly don't have a signedness, require either two pointers or
3182 two non-pointers as well. */
3183 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3184 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3185 return 0;
3187 /* We cannot consider pointers to different address space equal. */
3188 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
3189 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3190 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3191 return 0;
3193 /* If both types don't have the same precision, then it is not safe
3194 to strip NOPs. */
3195 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3196 return 0;
3198 STRIP_NOPS (arg0);
3199 STRIP_NOPS (arg1);
3201 /* In case both args are comparisons but with different comparison
3202 code, try to swap the comparison operands of one arg to produce
3203 a match and compare that variant. */
3204 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3205 && COMPARISON_CLASS_P (arg0)
3206 && COMPARISON_CLASS_P (arg1))
3208 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3210 if (TREE_CODE (arg0) == swap_code)
3211 return operand_equal_p (TREE_OPERAND (arg0, 0),
3212 TREE_OPERAND (arg1, 1), flags)
3213 && operand_equal_p (TREE_OPERAND (arg0, 1),
3214 TREE_OPERAND (arg1, 0), flags);
3217 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3218 /* This is needed for conversions and for COMPONENT_REF.
3219 Might as well play it safe and always test this. */
3220 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3221 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3222 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3223 return 0;
3225 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3226 We don't care about side effects in that case because the SAVE_EXPR
3227 takes care of that for us. In all other cases, two expressions are
3228 equal if they have no side effects. If we have two identical
3229 expressions with side effects that should be treated the same due
3230 to the only side effects being identical SAVE_EXPR's, that will
3231 be detected in the recursive calls below. */
3232 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3233 && (TREE_CODE (arg0) == SAVE_EXPR
3234 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3235 return 1;
3237 /* Next handle constant cases, those for which we can return 1 even
3238 if ONLY_CONST is set. */
3239 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3240 switch (TREE_CODE (arg0))
3242 case INTEGER_CST:
3243 return tree_int_cst_equal (arg0, arg1);
3245 case FIXED_CST:
3246 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3247 TREE_FIXED_CST (arg1));
3249 case REAL_CST:
3250 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3251 TREE_REAL_CST (arg1)))
3252 return 1;
3255 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3257 /* If we do not distinguish between signed and unsigned zero,
3258 consider them equal. */
3259 if (real_zerop (arg0) && real_zerop (arg1))
3260 return 1;
3262 return 0;
3264 case VECTOR_CST:
3266 tree v1, v2;
3268 v1 = TREE_VECTOR_CST_ELTS (arg0);
3269 v2 = TREE_VECTOR_CST_ELTS (arg1);
3270 while (v1 && v2)
3272 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3273 flags))
3274 return 0;
3275 v1 = TREE_CHAIN (v1);
3276 v2 = TREE_CHAIN (v2);
3279 return v1 == v2;
3282 case COMPLEX_CST:
3283 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3284 flags)
3285 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3286 flags));
3288 case STRING_CST:
3289 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3290 && ! memcmp (TREE_STRING_POINTER (arg0),
3291 TREE_STRING_POINTER (arg1),
3292 TREE_STRING_LENGTH (arg0)));
3294 case ADDR_EXPR:
3295 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3297 default:
3298 break;
3301 if (flags & OEP_ONLY_CONST)
3302 return 0;
3304 /* Define macros to test an operand from arg0 and arg1 for equality and a
3305 variant that allows null and views null as being different from any
3306 non-null value. In the latter case, if either is null, the both
3307 must be; otherwise, do the normal comparison. */
3308 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3309 TREE_OPERAND (arg1, N), flags)
3311 #define OP_SAME_WITH_NULL(N) \
3312 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3313 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3315 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3317 case tcc_unary:
3318 /* Two conversions are equal only if signedness and modes match. */
3319 switch (TREE_CODE (arg0))
3321 CASE_CONVERT:
3322 case FIX_TRUNC_EXPR:
3323 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3324 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3325 return 0;
3326 break;
3327 default:
3328 break;
3331 return OP_SAME (0);
3334 case tcc_comparison:
3335 case tcc_binary:
3336 if (OP_SAME (0) && OP_SAME (1))
3337 return 1;
3339 /* For commutative ops, allow the other order. */
3340 return (commutative_tree_code (TREE_CODE (arg0))
3341 && operand_equal_p (TREE_OPERAND (arg0, 0),
3342 TREE_OPERAND (arg1, 1), flags)
3343 && operand_equal_p (TREE_OPERAND (arg0, 1),
3344 TREE_OPERAND (arg1, 0), flags));
3346 case tcc_reference:
3347 /* If either of the pointer (or reference) expressions we are
3348 dereferencing contain a side effect, these cannot be equal. */
3349 if (TREE_SIDE_EFFECTS (arg0)
3350 || TREE_SIDE_EFFECTS (arg1))
3351 return 0;
3353 switch (TREE_CODE (arg0))
3355 case INDIRECT_REF:
3356 case ALIGN_INDIRECT_REF:
3357 case MISALIGNED_INDIRECT_REF:
3358 case REALPART_EXPR:
3359 case IMAGPART_EXPR:
3360 return OP_SAME (0);
3362 case ARRAY_REF:
3363 case ARRAY_RANGE_REF:
3364 /* Operands 2 and 3 may be null.
3365 Compare the array index by value if it is constant first as we
3366 may have different types but same value here. */
3367 return (OP_SAME (0)
3368 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3369 TREE_OPERAND (arg1, 1))
3370 || OP_SAME (1))
3371 && OP_SAME_WITH_NULL (2)
3372 && OP_SAME_WITH_NULL (3));
3374 case COMPONENT_REF:
3375 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3376 may be NULL when we're called to compare MEM_EXPRs. */
3377 return OP_SAME_WITH_NULL (0)
3378 && OP_SAME (1)
3379 && OP_SAME_WITH_NULL (2);
3381 case BIT_FIELD_REF:
3382 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3384 default:
3385 return 0;
3388 case tcc_expression:
3389 switch (TREE_CODE (arg0))
3391 case ADDR_EXPR:
3392 case TRUTH_NOT_EXPR:
3393 return OP_SAME (0);
3395 case TRUTH_ANDIF_EXPR:
3396 case TRUTH_ORIF_EXPR:
3397 return OP_SAME (0) && OP_SAME (1);
3399 case TRUTH_AND_EXPR:
3400 case TRUTH_OR_EXPR:
3401 case TRUTH_XOR_EXPR:
3402 if (OP_SAME (0) && OP_SAME (1))
3403 return 1;
3405 /* Otherwise take into account this is a commutative operation. */
3406 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3407 TREE_OPERAND (arg1, 1), flags)
3408 && operand_equal_p (TREE_OPERAND (arg0, 1),
3409 TREE_OPERAND (arg1, 0), flags));
3411 case COND_EXPR:
3412 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3414 default:
3415 return 0;
3418 case tcc_vl_exp:
3419 switch (TREE_CODE (arg0))
3421 case CALL_EXPR:
3422 /* If the CALL_EXPRs call different functions, then they
3423 clearly can not be equal. */
3424 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3425 flags))
3426 return 0;
3429 unsigned int cef = call_expr_flags (arg0);
3430 if (flags & OEP_PURE_SAME)
3431 cef &= ECF_CONST | ECF_PURE;
3432 else
3433 cef &= ECF_CONST;
3434 if (!cef)
3435 return 0;
3438 /* Now see if all the arguments are the same. */
3440 const_call_expr_arg_iterator iter0, iter1;
3441 const_tree a0, a1;
3442 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3443 a1 = first_const_call_expr_arg (arg1, &iter1);
3444 a0 && a1;
3445 a0 = next_const_call_expr_arg (&iter0),
3446 a1 = next_const_call_expr_arg (&iter1))
3447 if (! operand_equal_p (a0, a1, flags))
3448 return 0;
3450 /* If we get here and both argument lists are exhausted
3451 then the CALL_EXPRs are equal. */
3452 return ! (a0 || a1);
3454 default:
3455 return 0;
3458 case tcc_declaration:
3459 /* Consider __builtin_sqrt equal to sqrt. */
3460 return (TREE_CODE (arg0) == FUNCTION_DECL
3461 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3462 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3463 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3465 default:
3466 return 0;
3469 #undef OP_SAME
3470 #undef OP_SAME_WITH_NULL
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  /* Trivially equal operands need no further analysis.  */
  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* shorten_compare only applies to integral comparisons.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  /* Only proceed when both narrowed operands are strictly narrower than
     the comparison width and agree in signedness, as shorten_compare
     requires.  */
  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record or match operand 0 against the values seen so far.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;  /* Already recorded as CVAL1; nothing to do.  */
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;  /* Already recorded as CVAL2; nothing to do.  */
      else
	return 0;

      /* Likewise for operand 1; CVAL1 is known nonzero here.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;  /* Matches CVAL1; nothing to do.  */
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;  /* Matches CVAL2; nothing to do.  */
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      /* Recurse into the single operand and rebuild.  */
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      /* Recurse into both operands and rebuild.  */
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value operand matters for evaluation.  */
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      /* Anything else is left untouched.  */
      return arg;
    }
}
3711 /* Return a tree for the case when the result of an expression is RESULT
3712 converted to TYPE and OMITTED was previously an operand of the expression
3713 but is now not needed (e.g., we folded OMITTED * 0).
3715 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3716 the conversion of RESULT to TYPE. */
3718 tree
3719 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3721 tree t = fold_convert_loc (loc, type, result);
3723 /* If the resulting operand is an empty statement, just return the omitted
3724 statement casted to void. */
3725 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3727 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3728 goto omit_one_operand_exit;
3731 if (TREE_SIDE_EFFECTS (omitted))
3733 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3734 goto omit_one_operand_exit;
3737 return non_lvalue_loc (loc, t);
3739 omit_one_operand_exit:
3740 protected_set_expr_location (t, loc);
3741 return t;
3744 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3746 static tree
3747 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3748 tree omitted)
3750 tree t = fold_convert_loc (loc, type, result);
3752 /* If the resulting operand is an empty statement, just return the omitted
3753 statement casted to void. */
3754 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3756 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3757 goto pedantic_omit_one_operand_exit;
3760 if (TREE_SIDE_EFFECTS (omitted))
3762 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3763 goto pedantic_omit_one_operand_exit;
3766 return pedantic_non_lvalue_loc (loc, t);
3768 pedantic_omit_one_operand_exit:
3769 protected_set_expr_location (t, loc);
3770 return t;
3773 /* Return a tree for the case when the result of an expression is RESULT
3774 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3775 of the expression but are now not needed.
3777 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3778 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3779 evaluated before OMITTED2. Otherwise, if neither has side effects,
3780 just do the conversion of RESULT to TYPE. */
3782 tree
3783 omit_two_operands_loc (location_t loc, tree type, tree result,
3784 tree omitted1, tree omitted2)
3786 tree t = fold_convert_loc (loc, type, result);
3788 if (TREE_SIDE_EFFECTS (omitted2))
3790 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3791 SET_EXPR_LOCATION (t, loc);
3793 if (TREE_SIDE_EFFECTS (omitted1))
3795 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3796 SET_EXPR_LOCATION (t, loc);
3799 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   Returns NULL_TREE when no simplification is possible.

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree t, type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With trapping math, inverting an ordered FP comparison into an
	 unordered one (or vice versa) could change which operands trap,
	 so give up except for the always-safe codes.  */
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
      SET_EXPR_LOCATION (t, loc);
      return t;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a & b) => !a | !b.  */
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_OR_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a | b) => !a & !b.  */
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_AND_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		    TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	t = build2 (TRUTH_XOR_EXPR, type,
		    invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));
      break;

    case TRUTH_ANDIF_EXPR:
      /* De Morgan on the short-circuit forms: !(a && b) => !a || !b.  */
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ORIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_ORIF_EXPR:
      /* !(a || b) => !a && !b.  */
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ANDIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
	loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
	if (loc1 == UNKNOWN_LOCATION)
	  loc1 = loc;
	if (loc2 == UNKNOWN_LOCATION)
	  loc2 = loc;

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    VOID_TYPE_P (TREE_TYPE (arg1))
		    ? arg1 : invert_truthvalue_loc (loc1, arg1),
		    VOID_TYPE_P (TREE_TYPE (arg2))
		    ? arg2 : invert_truthvalue_loc (loc2, arg2));
	break;
      }

    case COMPOUND_EXPR:
      /* Only the value operand is negated; the side-effect operand
	 is kept as-is.  */
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build2 (COMPOUND_EXPR, type,
		  TREE_OPERAND (arg, 0),
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
      break;

    case NON_LVALUE_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      /* Don't push the negation through a conversion from boolean:
	 the conversion may widen 0/1 into another type.  */
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	{
	  t = build1 (TRUTH_NOT_EXPR, type, arg);
	  break;
	}

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (TREE_CODE (arg), type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    case BIT_AND_EXPR:
      /* !(x & 1) => (x & 1) == 0; only handle the single-bit mask.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
      break;

    case SAVE_EXPR:
      t = build1 (TRUTH_NOT_EXPR, type, arg);
      break;

    case CLEANUP_POINT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (CLEANUP_POINT_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    default:
      t = NULL_TREE;
      break;
    }

  if (t)
    SET_EXPR_LOCATION (t, loc);

  return t;
}
3994 /* Return a simplified tree node for the truth-negation of ARG. This
3995 never alters ARG itself. We assume that ARG is an operation that
3996 returns a truth value (0 or 1).
3998 FIXME: one would think we would fold the result, but it causes
3999 problems with the dominator optimizer. */
4001 tree
4002 invert_truthvalue_loc (location_t loc, tree arg)
4004 tree tem;
4006 if (TREE_CODE (arg) == ERROR_MARK)
4007 return arg;
4009 tem = fold_truth_not_expr (loc, arg);
4010 if (!tem)
4012 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
4013 SET_EXPR_LOCATION (tem, loc);
4016 return tem;
4019 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
4020 operands are another bit-wise operation with a common input. If so,
4021 distribute the bit operations to save an operation and possibly two if
4022 constants are involved. For example, convert
4023 (A | B) & (A | C) into A | (B & C)
4024 Further simplification will occur if B and C are constants.
4026 If this optimization cannot be done, 0 will be returned. */
4028 static tree
4029 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
4030 tree arg0, tree arg1)
4032 tree common;
4033 tree left, right;
4035 if (TREE_CODE (arg0) != TREE_CODE (arg1)
4036 || TREE_CODE (arg0) == code
4037 || (TREE_CODE (arg0) != BIT_AND_EXPR
4038 && TREE_CODE (arg0) != BIT_IOR_EXPR))
4039 return 0;
4041 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
4043 common = TREE_OPERAND (arg0, 0);
4044 left = TREE_OPERAND (arg0, 1);
4045 right = TREE_OPERAND (arg1, 1);
4047 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
4049 common = TREE_OPERAND (arg0, 0);
4050 left = TREE_OPERAND (arg0, 1);
4051 right = TREE_OPERAND (arg1, 0);
4053 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
4055 common = TREE_OPERAND (arg0, 1);
4056 left = TREE_OPERAND (arg0, 0);
4057 right = TREE_OPERAND (arg1, 1);
4059 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
4061 common = TREE_OPERAND (arg0, 1);
4062 left = TREE_OPERAND (arg0, 0);
4063 right = TREE_OPERAND (arg1, 0);
4065 else
4066 return 0;
4068 common = fold_convert_loc (loc, type, common);
4069 left = fold_convert_loc (loc, type, left);
4070 right = fold_convert_loc (loc, type, right);
4071 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
4072 fold_build2_loc (loc, code, type, left, right));
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      /* A multiplication (rather than a division) contributes its
	 constant factor directly; a division contributes its
	 reciprocal.  */
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      /* Combine the two constants with CODE, then multiply by A.  */
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  /* If the reference covers the whole of an integral or pointer object
     starting at bit 0, a plain conversion suffices.  */
  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  /* Use TYPE directly for the BIT_FIELD_REF only when its precision and
     signedness match; otherwise build a fresh integer type of exactly
     BITSIZE bits and convert afterwards.  */
  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3 (BIT_FIELD_REF, bftype, inner,
		   size_int (bitsize), bitsize_int (bitpos));
  SET_EXPR_LOCATION (result, loc);

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     if the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep, false);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: all-ones
     shifted so that exactly the LBITSIZE bits at LBITPOS are set.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree of for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  lhs = build2 (code, compare_type,
		build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
  SET_EXPR_LOCATION (lhs, loc);
  return lhs;
}
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if the any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off a BIT_AND_EXPR with a constant mask; the mask is merged
     into *PMASK below and also reported separately via *PAND_MASK.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  /* Give up when there is no actual reference (unless a mask made the
     expression interesting), when the size/offset is not constant, or
     when a PLACEHOLDER_EXPR is involved.  */
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness. Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  /* Build an all-ones constant and shift it up and back down so that
     exactly *PBITSIZE low-order one bits remain.  */
  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
4413 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4414 bit positions. */
4416 static int
4417 all_ones_mask_p (const_tree mask, int size)
4419 tree type = TREE_TYPE (mask);
4420 unsigned int precision = TYPE_PRECISION (type);
4421 tree tmask;
4423 tmask = build_int_cst_type (signed_type_for (type), -1);
4425 return
4426 tree_int_cst_equal (mask,
4427 const_binop (RSHIFT_EXPR,
4428 const_binop (LSHIFT_EXPR, tmask,
4429 size_int (precision - size),
4431 size_int (precision - size), 0));
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  /* The constant is handled as a two-word value: HI:LO is the sign-bit
     pattern to match and MASK_HI:MASK_LO masks off bits beyond the
     precision of EXP's type.  */
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      /* Wide type: the sign bit lives in the high word.  */
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      /* Narrow type: the sign bit lives in the low word.  */
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type: recurse so VAL is also
     checked against the sign bit of the unextended operand.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
4492 /* Subroutine for fold_truthop: determine if an operand is simple enough
4493 to be evaluated unconditionally. */
4495 static int
4496 simple_operand_p (const_tree exp)
4498 /* Strip any conversions that don't change the machine mode. */
4499 STRIP_NOPS (exp);
4501 return (CONSTANT_CLASS_P (exp)
4502 || TREE_CODE (exp) == SSA_NAME
4503 || (DECL_P (exp)
4504 && ! TREE_ADDRESSABLE (exp)
4505 && ! TREE_THIS_VOLATILE (exp)
4506 && ! DECL_NONLOCAL (exp)
4507 /* Don't regard global variables as simple. They may be
4508 allocated in ways unknown to the compiler (shared memory,
4509 #pragma weak, etc). */
4510 && ! TREE_PUBLIC (exp)
4511 && ! DECL_EXTERNAL (exp)
4512 /* Loading a static variable is unduly expensive, but global
4513 registers aren't expensive. */
4514 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4517 /* The following functions are subroutines to fold_range_test and allow it to
4518 try to change a logical combination of comparisons into a range test.
4520 For example, both
 4521 X == 2 || X == 3 || X == 4 || X == 5
 4522 and
 4523 X >= 2 && X <= 5
4524 are converted to
4525 (unsigned) (X - 2) <= 3
4527 We describe each set of comparisons as being either inside or outside
4528 a range, using a variable named like IN_P, and then describe the
4529 range with a lower and upper bound. If one of the bounds is omitted,
4530 it represents either the highest or lowest value of the type.
4532 In the comments below, we represent a range by two numbers in brackets
4533 preceded by a "+" to designate being inside that range, or a "-" to
4534 designate being outside that range, so the condition can be inverted by
4535 flipping the prefix. An omitted bound is represented by a "-". For
4536 example, "- [-, 10]" means being outside the range starting at the lowest
4537 possible value and ending at 10, in other words, being greater than 10.
4538 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4539 always false.
4541 We set up things so that the missing bounds are handled in a consistent
4542 manner so neither a missing bound nor "true" and "false" need to be
4543 handled using a special case. */
4545 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4546 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4547 and UPPER1_P are nonzero if the respective argument is an upper bound
4548 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4549 must be specified for a comparison. ARG1 will be converted to ARG0's
4550 type if both are specified. */
4552 static tree
4553 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4554 tree arg1, int upper1_p)
4556 tree tem;
4557 int result;
4558 int sgn0, sgn1;
4560 /* If neither arg represents infinity, do the normal operation.
4561 Else, if not a comparison, return infinity. Else handle the special
4562 comparison rules. Note that most of the cases below won't occur, but
4563 are handled for consistency. */
4565 if (arg0 != 0 && arg1 != 0)
4567 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4568 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4569 STRIP_NOPS (tem);
4570 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4573 if (TREE_CODE_CLASS (code) != tcc_comparison)
4574 return 0;
4576 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4577 for neither. In real maths, we cannot assume open ended ranges are
4578 the same. But, this is computer arithmetic, where numbers are finite.
4579 We can therefore make the transformation of any unbounded range with
4580 the value Z, Z being greater than any representable number. This permits
4581 us to treat unbounded ranges as equal. */
4582 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4583 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4584 switch (code)
4586 case EQ_EXPR:
4587 result = sgn0 == sgn1;
4588 break;
4589 case NE_EXPR:
4590 result = sgn0 != sgn1;
4591 break;
4592 case LT_EXPR:
4593 result = sgn0 < sgn1;
4594 break;
4595 case LE_EXPR:
4596 result = sgn0 <= sgn1;
4597 break;
4598 case GT_EXPR:
4599 result = sgn0 > sgn1;
4600 break;
4601 case GE_EXPR:
4602 result = sgn0 >= sgn1;
4603 break;
4604 default:
4605 gcc_unreachable ();
4608 return constant_boolean_node (result, type);
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      /* Pick up the operands relevant for this code class; ARG1 is only
	 meaningful for binary/comparison codes (and expressions with at
	 least two operands).  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Negation just flips the in/out sense of the range.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  SET_EXPR_LOCATION (exp, loc);
	  continue;

	case PLUS_EXPR: case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	CASE_CONVERT: case NON_LVALUE_EXPR:
	  /* Only look through narrowing or same-width conversions whose
	     bounds fit in the inner type.  */
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert_loc (loc, arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert_loc (loc, arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
		  (TYPE_MODE (arg0_type),
		   TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
		  (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
						 fold_convert_loc (loc, arg0_type,
								   high_positive),
						 build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert_loc (loc, arg0_type,
							integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert_loc (loc, arg0_type,
							integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* An "out of range" test is built as the inversion of the
     corresponding "in range" test.  */
  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  /* No bounds at all: the range test is trivially true.  */
  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  /* A single-value range degenerates to an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  /* [0, high]: a single unsigned comparison suffices.  */
  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Compute the two-word value of the signed maximum of ETYPE's
	 precision, to compare HIGH against.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      /* Make sure the signed type has the same precision;
		 otherwise build a matching nonstandard integer type.  */
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);


  /* Pointers must use POINTER_PLUS_EXPR with a negated sizetype offset
     instead of MINUS_EXPR.  */
  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_convert_loc (loc, sizetype, low);
	  low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
	  return build_range_check (loc, type,
				    fold_build2_loc (loc, POINTER_PLUS_EXPR,
						     etype, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
5056 /* Return the predecessor of VAL in its type, handling the infinite case. */
5058 static tree
5059 range_predecessor (tree val)
5061 tree type = TREE_TYPE (val);
5063 if (INTEGRAL_TYPE_P (type)
5064 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5065 return 0;
5066 else
5067 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5070 /* Return the successor of VAL in its type, handling the infinite case. */
5072 static tree
5073 range_successor (tree val)
5075 tree type = TREE_TYPE (val);
5077 if (INTEGRAL_TYPE_P (type)
5078 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5079 return 0;
5080 else
5081 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* A null bound means "unbounded", so two null bounds compare equal.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
5296 /* Subroutine of fold, looking inside expressions of the form
5297 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5298 of the COND_EXPR. This function is being used also to optimize
5299 A op B ? C : A, by reversing the comparison first.
5301 Return a folded expression whose code is not a COND_EXPR
5302 anymore, or NULL_TREE if no folding opportunity is found. */
5304 static tree
5305 fold_cond_expr_with_comparison (location_t loc, tree type,
5306 tree arg0, tree arg1, tree arg2)
/* ARG0 is the comparison; ARG00 and ARG01 are its two operands, and
   ARG1/ARG2 are the two value arms of the COND_EXPR. */
5308 enum tree_code comp_code = TREE_CODE (arg0);
5309 tree arg00 = TREE_OPERAND (arg0, 0);
5310 tree arg01 = TREE_OPERAND (arg0, 1);
5311 tree arg1_type = TREE_TYPE (arg1);
5312 tree tem;
/* Look through no-op conversions on both arms so the structural
   pattern matches below (NEGATE_EXPR, MINUS_EXPR, ...) can fire. */
5314 STRIP_NOPS (arg1);
5315 STRIP_NOPS (arg2);
5317 /* If we have A op 0 ? A : -A, consider applying the following
5318 transformations:
5320 A == 0? A : -A same as -A
5321 A != 0? A : -A same as A
5322 A >= 0? A : -A same as abs (A)
5323 A > 0? A : -A same as abs (A)
5324 A <= 0? A : -A same as -abs (A)
5325 A < 0? A : -A same as -abs (A)
5327 None of these transformations work for modes with signed
5328 zeros. If A is +/-0, the first two transformations will
5329 change the sign of the result (from +0 to -0, or vice
5330 versa). The last four will fix the sign of the result,
5331 even though the original expressions could be positive or
5332 negative, depending on the sign of A.
5334 Note that all these transformations are correct if A is
5335 NaN, since the two alternatives (A and -A) are also NaNs. */
5336 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5337 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5338 ? real_zerop (arg01)
5339 : integer_zerop (arg01))
5340 && ((TREE_CODE (arg2) == NEGATE_EXPR
5341 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5342 /* In the case that A is of the form X-Y, '-A' (arg2) may
5343 have already been folded to Y-X, check for that. */
5344 || (TREE_CODE (arg1) == MINUS_EXPR
5345 && TREE_CODE (arg2) == MINUS_EXPR
5346 && operand_equal_p (TREE_OPERAND (arg1, 0),
5347 TREE_OPERAND (arg2, 1), 0)
5348 && operand_equal_p (TREE_OPERAND (arg1, 1),
5349 TREE_OPERAND (arg2, 0), 0))))
5350 switch (comp_code)
5352 case EQ_EXPR:
5353 case UNEQ_EXPR:
5354 tem = fold_convert_loc (loc, arg1_type, arg1);
5355 return pedantic_non_lvalue_loc (loc,
5356 fold_convert_loc (loc, type,
5357 negate_expr (tem)));
5358 case NE_EXPR:
5359 case LTGT_EXPR:
5360 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5361 case UNGE_EXPR:
5362 case UNGT_EXPR:
/* The unordered forms may raise an exception on NaN operands, so they
   are only folded when trapping math is disabled. */
5363 if (flag_trapping_math)
5364 break;
5365 /* Fall through. */
5366 case GE_EXPR:
5367 case GT_EXPR:
/* ABS_EXPR requires a signed type; convert an unsigned A first. */
5368 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5369 arg1 = fold_convert_loc (loc, signed_type_for
5370 (TREE_TYPE (arg1)), arg1);
5371 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5372 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5373 case UNLE_EXPR:
5374 case UNLT_EXPR:
5375 if (flag_trapping_math)
5376 break;
/* Fall through. */
5377 case LE_EXPR:
5378 case LT_EXPR:
5379 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5380 arg1 = fold_convert_loc (loc, signed_type_for
5381 (TREE_TYPE (arg1)), arg1);
5382 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5383 return negate_expr (fold_convert_loc (loc, type, tem));
5384 default:
5385 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5386 break;
5389 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5390 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5391 both transformations are correct when A is NaN: A != 0
5392 is then true, and A == 0 is false. */
5394 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5395 && integer_zerop (arg01) && integer_zerop (arg2))
5397 if (comp_code == NE_EXPR)
5398 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5399 else if (comp_code == EQ_EXPR)
5400 return build_int_cst (type, 0);
5403 /* Try some transformations of A op B ? A : B.
5405 A == B? A : B same as B
5406 A != B? A : B same as A
5407 A >= B? A : B same as max (A, B)
5408 A > B? A : B same as max (B, A)
5409 A <= B? A : B same as min (A, B)
5410 A < B? A : B same as min (B, A)
5412 As above, these transformations don't work in the presence
5413 of signed zeros. For example, if A and B are zeros of
5414 opposite sign, the first two transformations will change
5415 the sign of the result. In the last four, the original
5416 expressions give different results for (A=+0, B=-0) and
5417 (A=-0, B=+0), but the transformed expressions do not.
5419 The first two transformations are correct if either A or B
5420 is a NaN. In the first transformation, the condition will
5421 be false, and B will indeed be chosen. In the case of the
5422 second transformation, the condition A != B will be true,
5423 and A will be chosen.
5425 The conversions to max() and min() are not correct if B is
5426 a number and A is not. The conditions in the original
5427 expressions will be false, so all four give B. The min()
5428 and max() versions would give a NaN instead. */
5429 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5430 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5431 /* Avoid these transformations if the COND_EXPR may be used
5432 as an lvalue in the C++ front-end. PR c++/19199. */
5433 && (in_gimple_form
5434 || (strcmp (lang_hooks.name, "GNU C++") != 0
5435 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5436 || ! maybe_lvalue_p (arg1)
5437 || ! maybe_lvalue_p (arg2)))
5439 tree comp_op0 = arg00;
5440 tree comp_op1 = arg01;
5441 tree comp_type = TREE_TYPE (comp_op0);
5443 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5444 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5446 comp_type = type;
5447 comp_op0 = arg1;
5448 comp_op1 = arg2;
5451 switch (comp_code)
5453 case EQ_EXPR:
5454 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5455 case NE_EXPR:
5456 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5457 case LE_EXPR:
5458 case LT_EXPR:
5459 case UNLE_EXPR:
5460 case UNLT_EXPR:
5461 /* In C++ a ?: expression can be an lvalue, so put the
5462 operand which will be used if they are equal first
5463 so that we can convert this back to the
5464 corresponding COND_EXPR. */
5465 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5467 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5468 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5469 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5470 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5471 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5472 comp_op1, comp_op0);
5473 return pedantic_non_lvalue_loc (loc,
5474 fold_convert_loc (loc, type, tem));
5476 break;
5477 case GE_EXPR:
5478 case GT_EXPR:
5479 case UNGE_EXPR:
5480 case UNGT_EXPR:
5481 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5483 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5484 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5485 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5486 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5487 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5488 comp_op1, comp_op0);
5489 return pedantic_non_lvalue_loc (loc,
5490 fold_convert_loc (loc, type, tem));
5492 break;
5493 case UNEQ_EXPR:
/* A UNEQ B ? A : B reduces to B when NaNs are not honored. */
5494 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5495 return pedantic_non_lvalue_loc (loc,
5496 fold_convert_loc (loc, type, arg2));
5497 break;
5498 case LTGT_EXPR:
/* A LTGT B ? A : B reduces to A when NaNs are not honored. */
5499 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5500 return pedantic_non_lvalue_loc (loc,
5501 fold_convert_loc (loc, type, arg1));
5502 break;
5503 default:
5504 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5505 break;
5509 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5510 we might still be able to simplify this. For example,
5511 if C1 is one less or one more than C2, this might have started
5512 out as a MIN or MAX and been transformed by this function.
5513 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5515 if (INTEGRAL_TYPE_P (type)
5516 && TREE_CODE (arg01) == INTEGER_CST
5517 && TREE_CODE (arg2) == INTEGER_CST)
5518 switch (comp_code)
5520 case EQ_EXPR:
5521 if (TREE_CODE (arg1) == INTEGER_CST)
5522 break;
5523 /* We can replace A with C1 in this case. */
5524 arg1 = fold_convert_loc (loc, type, arg01);
5525 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5527 case LT_EXPR:
5528 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5529 MIN_EXPR, to preserve the signedness of the comparison. */
5530 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5531 OEP_ONLY_CONST)
5532 && operand_equal_p (arg01,
5533 const_binop (PLUS_EXPR, arg2,
5534 build_int_cst (type, 1), 0),
5535 OEP_ONLY_CONST))
5537 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5538 fold_convert_loc (loc, TREE_TYPE (arg00),
5539 arg2));
5540 return pedantic_non_lvalue_loc (loc,
5541 fold_convert_loc (loc, type, tem));
5543 break;
5545 case LE_EXPR:
5546 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5547 as above. */
5548 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5549 OEP_ONLY_CONST)
5550 && operand_equal_p (arg01,
5551 const_binop (MINUS_EXPR, arg2,
5552 build_int_cst (type, 1), 0),
5553 OEP_ONLY_CONST))
5555 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5556 fold_convert_loc (loc, TREE_TYPE (arg00),
5557 arg2));
5558 return pedantic_non_lvalue_loc (loc,
5559 fold_convert_loc (loc, type, tem));
5561 break;
5563 case GT_EXPR:
5564 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5565 MAX_EXPR, to preserve the signedness of the comparison. */
5566 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5567 OEP_ONLY_CONST)
5568 && operand_equal_p (arg01,
5569 const_binop (MINUS_EXPR, arg2,
5570 build_int_cst (type, 1), 0),
5571 OEP_ONLY_CONST))
5573 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5574 fold_convert_loc (loc, TREE_TYPE (arg00),
5575 arg2));
5576 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5578 break;
5580 case GE_EXPR:
5581 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5582 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5583 OEP_ONLY_CONST)
5584 && operand_equal_p (arg01,
5585 const_binop (PLUS_EXPR, arg2,
5586 build_int_cst (type, 1), 0),
5587 OEP_ONLY_CONST))
5589 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5590 fold_convert_loc (loc, TREE_TYPE (arg00),
5591 arg2));
5592 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5594 break;
5595 case NE_EXPR:
5596 break;
5597 default:
5598 gcc_unreachable ();
5601 return NULL_TREE;
5606 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
/* Default definition: prefer non-short-circuit (TRUTH_AND/OR) forms of
   logical operations when the target's branch cost is at least 2, i.e.
   when branches are expensive.  Targets may override this macro. */
5607 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5608 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5609 false) >= 2)
5610 #endif
5612 /* EXP is some logical combination of boolean tests. See if we can
5613 merge it into some range test. Return the new tree if so. */
5615 static tree
5616 fold_range_test (location_t loc, enum tree_code code, tree type,
5617 tree op0, tree op1)
5619 int or_op = (code == TRUTH_ORIF_EXPR
5620 || code == TRUTH_OR_EXPR)
5621 int in0_p, in1_p, in_p;
5622 tree low0, low1, low, high0, high1, high;
5623 bool strict_overflow_p = false;
/* Decompose each operand into a range test "expr in/out of [low, high]";
   lhs/rhs are 0 when no such decomposition exists. */
5624 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5625 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5626 tree tem;
5627 const char * const warnmsg = G_("assuming signed overflow does not occur "
5628 "when simplifying range test");
5630 /* If this is an OR operation, invert both sides; we will invert
5631 again at the end. */
5632 if (or_op)
5633 in0_p = ! in0_p, in1_p = ! in1_p;
5635 /* If both expressions are the same, if we can merge the ranges, and we
5636 can build the range test, return it or it inverted. If one of the
5637 ranges is always true or always false, consider it to be the same
5638 expression as the other. */
5639 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5640 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5641 in1_p, low1, high1)
5642 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5643 lhs != 0 ? lhs
5644 : rhs != 0 ? rhs : integer_zero_node,
5645 in_p, low, high))))
5647 if (strict_overflow_p)
5648 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5649 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5652 /* On machines where the branch cost is expensive, if this is a
5653 short-circuited branch and the underlying object on both sides
5654 is the same, make a non-short-circuit operation. */
5655 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5656 && lhs != 0 && rhs != 0
5657 && (code == TRUTH_ANDIF_EXPR
5658 || code == TRUTH_ORIF_EXPR)
5659 && operand_equal_p (lhs, rhs, 0))
5661 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5662 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5663 which cases we can't do this. */
5664 if (simple_operand_p (lhs))
5666 tem = build2 (code == TRUTH_ANDIF_EXPR
5667 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5668 type, op0, op1);
5669 SET_EXPR_LOCATION (tem, loc);
5670 return tem;
5673 else if (lang_hooks.decls.global_bindings_p () == 0
5674 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the common operand only once via a SAVE_EXPR, then
   rebuild both range checks against it. */
5676 tree common = save_expr (lhs);
5678 if (0 != (lhs = build_range_check (loc, type, common,
5679 or_op ? ! in0_p : in0_p,
5680 low0, high0))
5681 && (0 != (rhs = build_range_check (loc, type, common,
5682 or_op ? ! in1_p : in1_p,
5683 low1, high1))))
5685 if (strict_overflow_p)
5686 fold_overflow_warning (warnmsg,
5687 WARN_STRICT_OVERFLOW_COMPARISON);
5688 tem = build2 (code == TRUTH_ANDIF_EXPR
5689 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5690 type, lhs, rhs);
5691 SET_EXPR_LOCATION (tem, loc);
5692 return tem;
5697 return 0;
5700 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5701 bit value. Arrange things so the extra bits will be set to zero if and
5702 only if C is signed-extended to its full width. If MASK is nonzero,
5703 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5705 static tree
5706 unextend (tree c, int p, int unsignedp, tree mask)
5708 tree type = TREE_TYPE (c);
5709 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5710 tree temp;
/* If the field occupies the full mode, or is unsigned (no sign
   extension possible), C needs no adjustment. */
5712 if (p == modesize || unsignedp)
5713 return c;
5715 /* We work by getting just the sign bit into the low-order bit, then
5716 into the high-order bit, then sign-extend. We then XOR that value
5717 with C. */
5718 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5719 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5721 /* We must use a signed type in order to get an arithmetic right shift.
5722 However, we must also avoid introducing accidental overflows, so that
5723 a subsequent call to integer_zerop will work. Hence we must
5724 do the type conversion here. At this point, the constant is either
5725 zero or one, and the conversion to a signed type can never overflow.
5726 We could get an overflow if this conversion is done anywhere else. */
5727 if (TYPE_UNSIGNED (type))
5728 temp = fold_convert (signed_type_for (type), temp);
5730 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5731 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5732 if (mask != 0)
5733 temp = const_binop (BIT_AND_EXPR, temp,
5734 fold_convert (TREE_TYPE (c), mask),
5736 /* If necessary, convert the type back to match the type of C. */
5737 if (TYPE_UNSIGNED (type))
5738 temp = fold_convert (type, temp)
5740 return fold_convert (type,
5741 const_binop (BIT_XOR_EXPR, c, temp, 0));
5744 /* Find ways of folding logical expressions of LHS and RHS:
5745 Try to merge two comparisons to the same innermost item.
5746 Look for range tests like "ch >= '0' && ch <= '9'".
5747 Look for combinations of simple terms on machines with expensive branches
5748 and evaluate the RHS unconditionally.
5750 For example, if we have p->a == 2 && p->b == 4 and we can make an
5751 object large enough to span both A and B, we can do this with a comparison
5752 against the object ANDed with the a mask.
5754 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5755 operations to do this with one comparison.
5757 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5758 function and the one above.
5760 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5761 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5763 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5764 two operands.
5766 We return the simplified tree or 0 if no optimization is possible. */
5768 static tree
5769 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5770 tree lhs, tree rhs)
5772 /* If this is the "or" of two comparisons, we can do something if
5773 the comparisons are NE_EXPR. If this is the "and", we can do something
5774 if the comparisons are EQ_EXPR. I.e.,
5775 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5777 WANTED_CODE is this operation code. For single bit fields, we can
5778 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5779 comparison for one-bit fields. */
5781 enum tree_code wanted_code;
5782 enum tree_code lcode, rcode;
/* Naming convention for the locals below: the first letter is the side
   of the TRUTH op (l/r), the second is the side of that comparison. */
5783 tree ll_arg, lr_arg, rl_arg, rr_arg;
5784 tree ll_inner, lr_inner, rl_inner, rr_inner;
5785 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5786 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5787 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5788 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5789 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5790 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5791 enum machine_mode lnmode, rnmode;
5792 tree ll_mask, lr_mask, rl_mask, rr_mask;
5793 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5794 tree l_const, r_const;
5795 tree lntype, rntype, result;
5796 HOST_WIDE_INT first_bit, end_bit;
5797 int volatilep;
5798 tree orig_lhs = lhs, orig_rhs = rhs;
5799 enum tree_code orig_code = code;
5801 /* Start by getting the comparison codes. Fail if anything is volatile.
5802 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5803 it were surrounded with a NE_EXPR. */
5805 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5806 return 0;
5808 lcode = TREE_CODE (lhs);
5809 rcode = TREE_CODE (rhs);
5811 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5813 lhs = build2 (NE_EXPR, truth_type, lhs,
5814 build_int_cst (TREE_TYPE (lhs), 0));
5815 lcode = NE_EXPR;
5818 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5820 rhs = build2 (NE_EXPR, truth_type, rhs,
5821 build_int_cst (TREE_TYPE (rhs), 0));
5822 rcode = NE_EXPR;
5825 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5826 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5827 return 0;
5829 ll_arg = TREE_OPERAND (lhs, 0);
5830 lr_arg = TREE_OPERAND (lhs, 1);
5831 rl_arg = TREE_OPERAND (rhs, 0);
5832 rr_arg = TREE_OPERAND (rhs, 1);
5834 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5835 if (simple_operand_p (ll_arg)
5836 && simple_operand_p (lr_arg))
5838 tree result;
5839 if (operand_equal_p (ll_arg, rl_arg, 0)
5840 && operand_equal_p (lr_arg, rr_arg, 0))
5842 result = combine_comparisons (loc, code, lcode, rcode,
5843 truth_type, ll_arg, lr_arg);
5844 if (result)
5845 return result;
5847 else if (operand_equal_p (ll_arg, rr_arg, 0)
5848 && operand_equal_p (lr_arg, rl_arg, 0))
/* Same operands but swapped on the RHS comparison: swap its
   comparison code so combine_comparisons can still merge. */
5850 result = combine_comparisons (loc, code, lcode,
5851 swap_tree_comparison (rcode),
5852 truth_type, ll_arg, lr_arg);
5853 if (result)
5854 return result;
/* Canonicalize to the non-short-circuit form of the operation for
   the remaining transformations. */
5858 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5859 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5861 /* If the RHS can be evaluated unconditionally and its operands are
5862 simple, it wins to evaluate the RHS unconditionally on machines
5863 with expensive branches. In this case, this isn't a comparison
5864 that can be merged. Avoid doing this if the RHS is a floating-point
5865 comparison since those can trap. */
5867 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5868 false) >= 2
5869 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5870 && simple_operand_p (rl_arg)
5871 && simple_operand_p (rr_arg))
5873 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5874 if (code == TRUTH_OR_EXPR
5875 && lcode == NE_EXPR && integer_zerop (lr_arg)
5876 && rcode == NE_EXPR && integer_zerop (rr_arg)
5877 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5878 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5880 result = build2 (NE_EXPR, truth_type,
5881 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5882 ll_arg, rl_arg),
5883 build_int_cst (TREE_TYPE (ll_arg), 0));
5884 goto fold_truthop_exit;
5887 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5888 if (code == TRUTH_AND_EXPR
5889 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5890 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5891 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5892 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5894 result = build2 (EQ_EXPR, truth_type,
5895 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5896 ll_arg, rl_arg),
5897 build_int_cst (TREE_TYPE (ll_arg), 0));
5898 goto fold_truthop_exit;
5901 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
/* Only rebuild if something actually changed above; otherwise we
   would return a tree identical to the input and loop in fold. */
5903 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5905 result = build2 (code, truth_type, lhs, rhs);
5906 goto fold_truthop_exit;
5908 return NULL_TREE;
5912 /* See if the comparisons can be merged. Then get all the parameters for
5913 each side. */
5915 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5916 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5917 return 0;
/* Decode all four comparison operands as (possibly masked) bit-field
   references, collecting position, size, mode, signedness and masks. */
5919 volatilep = 0;
5920 ll_inner = decode_field_reference (loc, ll_arg,
5921 &ll_bitsize, &ll_bitpos, &ll_mode,
5922 &ll_unsignedp, &volatilep, &ll_mask,
5923 &ll_and_mask);
5924 lr_inner = decode_field_reference (loc, lr_arg,
5925 &lr_bitsize, &lr_bitpos, &lr_mode,
5926 &lr_unsignedp, &volatilep, &lr_mask,
5927 &lr_and_mask);
5928 rl_inner = decode_field_reference (loc, rl_arg,
5929 &rl_bitsize, &rl_bitpos, &rl_mode,
5930 &rl_unsignedp, &volatilep, &rl_mask,
5931 &rl_and_mask);
5932 rr_inner = decode_field_reference (loc, rr_arg,
5933 &rr_bitsize, &rr_bitpos, &rr_mode,
5934 &rr_unsignedp, &volatilep, &rr_mask,
5935 &rr_and_mask);
5937 /* It must be true that the inner operation on the lhs of each
5938 comparison must be the same if we are to be able to do anything.
5939 Then see if we have constants. If not, the same must be true for
5940 the rhs's. */
5941 if (volatilep || ll_inner == 0 || rl_inner == 0
5942 || ! operand_equal_p (ll_inner, rl_inner, 0))
5943 return 0;
5945 if (TREE_CODE (lr_arg) == INTEGER_CST
5946 && TREE_CODE (rr_arg) == INTEGER_CST)
5947 l_const = lr_arg, r_const = rr_arg;
5948 else if (lr_inner == 0 || rr_inner == 0
5949 || ! operand_equal_p (lr_inner, rr_inner, 0))
5950 return 0;
5951 else
5952 l_const = r_const = 0;
5954 /* If either comparison code is not correct for our logical operation,
5955 fail. However, we can convert a one-bit comparison against zero into
5956 the opposite comparison against that bit being set in the field. */
5958 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5959 if (lcode != wanted_code)
5961 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5963 /* Make the left operand unsigned, since we are only interested
5964 in the value of one bit. Otherwise we are doing the wrong
5965 thing below. */
5966 ll_unsignedp = 1;
5967 l_const = ll_mask;
5969 else
5970 return 0;
5973 /* This is analogous to the code for l_const above. */
5974 if (rcode != wanted_code)
5976 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5978 rl_unsignedp = 1;
5979 r_const = rl_mask;
5981 else
5982 return 0;
5985 /* See if we can find a mode that contains both fields being compared on
5986 the left. If we can't, fail. Otherwise, update all constants and masks
5987 to be relative to a field of that size. */
5988 first_bit = MIN (ll_bitpos, rl_bitpos);
5989 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5990 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5991 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5992 volatilep);
5993 if (lnmode == VOIDmode)
5994 return 0;
5996 lnbitsize = GET_MODE_BITSIZE (lnmode);
5997 lnbitpos = first_bit & ~ (lnbitsize - 1);
5998 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5999 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
/* Bit positions within the wider field are counted from the other end
   on big-endian targets. */
6001 if (BYTES_BIG_ENDIAN)
6003 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6004 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6007 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6008 size_int (xll_bitpos), 0);
6009 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6010 size_int (xrl_bitpos), 0);
6012 if (l_const)
6014 l_const = fold_convert_loc (loc, lntype, l_const);
6015 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6016 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
/* If the constant has bits set outside the field's mask, the
   comparison result is known at compile time. */
6017 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6018 fold_build1_loc (loc, BIT_NOT_EXPR,
6019 lntype, ll_mask),
6020 0)))
6022 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6024 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6027 if (r_const)
6029 r_const = fold_convert_loc (loc, lntype, r_const);
6030 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6031 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
6032 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6033 fold_build1_loc (loc, BIT_NOT_EXPR,
6034 lntype, rl_mask),
6035 0)))
6037 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6039 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6043 /* If the right sides are not constant, do the same for it. Also,
6044 disallow this optimization if a size or signedness mismatch occurs
6045 between the left and right sides. */
6046 if (l_const == 0)
6048 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6049 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6050 /* Make sure the two fields on the right
6051 correspond to the left without being swapped. */
6052 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6053 return 0;
6055 first_bit = MIN (lr_bitpos, rr_bitpos);
6056 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6057 rnmode = get_best_mode (end_bit - first_bit, first_bit,
6058 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
6059 volatilep);
6060 if (rnmode == VOIDmode)
6061 return 0;
6063 rnbitsize = GET_MODE_BITSIZE (rnmode);
6064 rnbitpos = first_bit & ~ (rnbitsize - 1);
6065 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6066 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6068 if (BYTES_BIG_ENDIAN)
6070 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6071 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6074 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6075 rntype, lr_mask),
6076 size_int (xlr_bitpos), 0);
6077 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6078 rntype, rr_mask),
6079 size_int (xrr_bitpos), 0);
6081 /* Make a mask that corresponds to both fields being compared.
6082 Do this for both items being compared. If the operands are the
6083 same size and the bits being compared are in the same position
6084 then we can do this by masking both and comparing the masked
6085 results. */
6086 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6087 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6088 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6090 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6091 ll_unsignedp || rl_unsignedp);
6092 if (! all_ones_mask_p (ll_mask, lnbitsize))
6093 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6095 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6096 lr_unsignedp || rr_unsignedp);
6097 if (! all_ones_mask_p (lr_mask, rnbitsize))
6098 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6100 result = build2 (wanted_code, truth_type, lhs, rhs);
6101 goto fold_truthop_exit;
6104 /* There is still another way we can do something: If both pairs of
6105 fields being compared are adjacent, we may be able to make a wider
6106 field containing them both.
6108 Note that we still must mask the lhs/rhs expressions. Furthermore,
6109 the mask must be shifted to account for the shift done by
6110 make_bit_field_ref. */
6111 if ((ll_bitsize + ll_bitpos == rl_bitpos
6112 && lr_bitsize + lr_bitpos == rr_bitpos)
6113 || (ll_bitpos == rl_bitpos + rl_bitsize
6114 && lr_bitpos == rr_bitpos + rr_bitsize))
6116 tree type;
6118 lhs = make_bit_field_ref (loc, ll_inner, lntype,
6119 ll_bitsize + rl_bitsize,
6120 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6121 rhs = make_bit_field_ref (loc, lr_inner, rntype,
6122 lr_bitsize + rr_bitsize,
6123 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6125 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6126 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6127 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6128 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6130 /* Convert to the smaller type before masking out unwanted bits. */
6131 type = lntype;
6132 if (lntype != rntype)
6134 if (lnbitsize > rnbitsize)
6136 lhs = fold_convert_loc (loc, rntype, lhs);
6137 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6138 type = rntype;
6140 else if (lnbitsize < rnbitsize)
6142 rhs = fold_convert_loc (loc, lntype, rhs);
6143 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6144 type = lntype;
6148 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6149 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6151 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6152 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6154 result = build2 (wanted_code, truth_type, lhs, rhs);
6155 goto fold_truthop_exit;
6158 return 0;
6161 /* Handle the case of comparisons with constants. If there is something in
6162 common between the masks, those bits of the constants must be the same.
6163 If not, the condition is always false. Test for this to avoid generating
6164 incorrect code below. */
6165 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6166 if (! integer_zerop (result)
6167 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6168 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6170 if (wanted_code == NE_EXPR)
6172 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6173 return constant_boolean_node (true, truth_type);
6175 else
6177 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6178 return constant_boolean_node (false, truth_type);
6182 /* Construct the expression we will return. First get the component
6183 reference we will make. Unless the mask is all ones the width of
6184 that field, perform the mask operation. Then compare with the
6185 merged constant. */
6186 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6187 ll_unsignedp || rl_unsignedp);
6189 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6190 if (! all_ones_mask_p (ll_mask, lnbitsize))
6192 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6193 SET_EXPR_LOCATION (result, loc);
6196 result = build2 (wanted_code, truth_type, result,
6197 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* Common exit: attach the source location to the folded result. */
6199 fold_truthop_exit:
6200 SET_EXPR_LOCATION (result, loc);
6201 return result;
6204 /* Optimize T, which is a comparison (CODE) of a MIN_EXPR or MAX_EXPR (OP0)
6205 with a constant (OP1). TYPE is the type of the comparison result.
Return the folded comparison, or NULL_TREE if no simplification is
possible. */
6207 static tree
6208 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6209 tree op0, tree op1)
6211 tree arg0 = op0;
6212 enum tree_code op_code;
6213 tree comp_const;
6214 tree minmax_const;
6215 int consts_equal, consts_lt;
6216 tree inner;
6218 STRIP_SIGN_NOPS (arg0);
6220 op_code = TREE_CODE (arg0);
6221 minmax_const = TREE_OPERAND (arg0, 1);
6222 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6223 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6224 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6225 inner = TREE_OPERAND (arg0, 0);
6227 /* If something does not permit us to optimize, return NULL_TREE. */
6228 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6229 || TREE_CODE (comp_const) != INTEGER_CST
6230 || TREE_OVERFLOW (comp_const)
6231 || TREE_CODE (minmax_const) != INTEGER_CST
6232 || TREE_OVERFLOW (minmax_const))
6233 return NULL_TREE;
6235 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6236 and GT_EXPR, doing the rest with recursive calls using logical
6237 simplifications. */
6238 switch (code)
6240 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* Fold the inverted comparison, then invert the result. */
6242 tree tem
6243 = optimize_minmax_comparison (loc,
6244 invert_tree_comparison (code, false),
6245 type, op0, op1);
6246 if (tem)
6247 return invert_truthvalue_loc (loc, tem);
6248 return NULL_TREE;
6251 case GE_EXPR:
6252 return
6253 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6254 optimize_minmax_comparison
6255 (loc, EQ_EXPR, type, arg0, comp_const),
6256 optimize_minmax_comparison
6257 (loc, GT_EXPR, type, arg0, comp_const));
6259 case EQ_EXPR:
6260 if (op_code == MAX_EXPR && consts_equal)
6261 /* MAX (X, 0) == 0 -> X <= 0 */
6262 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6264 else if (op_code == MAX_EXPR && consts_lt)
6265 /* MAX (X, 0) == 5 -> X == 5 */
6266 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6268 else if (op_code == MAX_EXPR)
6269 /* MAX (X, 0) == -1 -> false */
6270 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6272 else if (consts_equal)
6273 /* MIN (X, 0) == 0 -> X >= 0 */
6274 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6276 else if (consts_lt)
6277 /* MIN (X, 0) == 5 -> false */
6278 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6280 else
6281 /* MIN (X, 0) == -1 -> X == -1 */
6282 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6284 case GT_EXPR:
6285 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6286 /* MAX (X, 0) > 0 -> X > 0
6287 MAX (X, 0) > 5 -> X > 5 */
6288 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6290 else if (op_code == MAX_EXPR)
6291 /* MAX (X, 0) > -1 -> true */
6292 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6294 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6295 /* MIN (X, 0) > 0 -> false
6296 MIN (X, 0) > 5 -> false */
6297 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6299 else
6300 /* MIN (X, 0) > -1 -> X > -1 */
6301 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6303 default:
6304 return NULL_TREE;
6308 /* T is an integer expression that is being multiplied, divided, or taken a
6309 modulus (CODE says which and what kind of divide or modulus) by a
6310 constant C. See if we can eliminate that operation by folding it with
6311 other operations already in T. WIDE_TYPE, if non-null, is a type that
6312 should be used for the computation if wider than our type.
6314 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6315 (X * 2) + (Y * 4). We must, however, be assured that either the original
6316 expression would not overflow or that overflow is undefined for the type
6317 in the language in question.
6319 If we return a non-null expression, it is an equivalent form of the
6320 original computation, but need not be in the original type.
6322 We set *STRICT_OVERFLOW_P to true if the return value depends on
6323 signed overflow being undefined. Otherwise we do not change
6324 *STRICT_OVERFLOW_P. */
6326 static tree
6327 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6328 bool *strict_overflow_p)
6330 /* To avoid exponential search depth, refuse to allow recursion past
6331 three levels. Beyond that (1) it's highly unlikely that we'll find
6332 something interesting and (2) we've probably processed it before
6333 when we built the inner expression. */
/* NOTE: DEPTH is function-static shared state, so this routine is not
reentrant. */
6335 static int depth;
6336 tree ret;
6338 if (depth > 3)
6339 return NULL;
6341 depth++;
6342 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6343 depth--;
6345 return ret;
/* Worker for extract_muldiv; see the comment above extract_muldiv for
the meaning of the arguments and the return value. Dispatches on the
tree code of T. */
6348 static tree
6349 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6350 bool *strict_overflow_p)
6352 tree type = TREE_TYPE (t);
6353 enum tree_code tcode = TREE_CODE (t);
/* CTYPE is WIDE_TYPE when it is strictly wider than T's type, else
T's own type. */
6354 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6355 > GET_MODE_SIZE (TYPE_MODE (type)))
6356 ? wide_type : type);
6357 tree t1, t2;
6358 int same_p = tcode == code;
6359 tree op0 = NULL_TREE, op1 = NULL_TREE;
6360 bool sub_strict_overflow_p;
6362 /* Don't deal with constants of zero here; they confuse the code below. */
6363 if (integer_zerop (c))
6364 return NULL_TREE;
6366 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6367 op0 = TREE_OPERAND (t, 0);
6369 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6370 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6372 /* Note that we need not handle conditional operations here since fold
6373 already handles those cases. So just do arithmetic here. */
6374 switch (tcode)
6376 case INTEGER_CST:
6377 /* For a constant, we can always simplify if we are a multiply
6378 or (for divide and modulus) if it is a multiple of our constant. */
6379 if (code == MULT_EXPR
6380 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6381 return const_binop (code, fold_convert (ctype, t),
6382 fold_convert (ctype, c), 0);
6383 break;
6385 CASE_CONVERT: case NON_LVALUE_EXPR:
6386 /* If op0 is an expression ... */
6387 if ((COMPARISON_CLASS_P (op0)
6388 || UNARY_CLASS_P (op0)
6389 || BINARY_CLASS_P (op0)
6390 || VL_EXP_CLASS_P (op0)
6391 || EXPRESSION_CLASS_P (op0))
6392 /* ... and has wrapping overflow, and its type is smaller
6393 than ctype, then we cannot pass through as widening. */
6394 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6395 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6396 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6397 && (TYPE_PRECISION (ctype)
6398 > TYPE_PRECISION (TREE_TYPE (op0))))
6399 /* ... or this is a truncation (t is narrower than op0),
6400 then we cannot pass through this narrowing. */
6401 || (TYPE_PRECISION (type)
6402 < TYPE_PRECISION (TREE_TYPE (op0)))
6403 /* ... or signedness changes for division or modulus,
6404 then we cannot pass through this conversion. */
6405 || (code != MULT_EXPR
6406 && (TYPE_UNSIGNED (ctype)
6407 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6408 /* ... or has undefined overflow while the converted to
6409 type has not, we cannot do the operation in the inner type
6410 as that would introduce undefined overflow. */
6411 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6412 && !TYPE_OVERFLOW_UNDEFINED (type))))
6413 break;
6415 /* Pass the constant down and see if we can make a simplification. If
6416 we can, replace this expression with the inner simplification for
6417 possible later conversion to our or some other type. */
6418 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6419 && TREE_CODE (t2) == INTEGER_CST
6420 && !TREE_OVERFLOW (t2)
6421 && (0 != (t1 = extract_muldiv (op0, t2, code,
6422 code == MULT_EXPR
6423 ? ctype : NULL_TREE,
6424 strict_overflow_p))))
6425 return t1;
6426 break;
6428 case ABS_EXPR:
6429 /* If widening the type changes it from signed to unsigned, then we
6430 must avoid building ABS_EXPR itself as unsigned. */
6431 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6433 tree cstype = (*signed_type_for) (ctype);
6434 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6435 != 0)
6437 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6438 return fold_convert (ctype, t1);
6440 break;
6442 /* If the constant is negative, we cannot simplify this. */
6443 if (tree_int_cst_sgn (c) == -1)
6444 break;
6445 /* FALLTHROUGH */
6446 case NEGATE_EXPR:
6447 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6448 != 0)
6449 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6450 break;
6452 case MIN_EXPR: case MAX_EXPR:
6453 /* If widening the type changes the signedness, then we can't perform
6454 this optimization as that changes the result. */
6455 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6456 break;
6458 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6459 sub_strict_overflow_p = false;
6460 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6461 &sub_strict_overflow_p)) != 0
6462 && (t2 = extract_muldiv (op1, c, code, wide_type,
6463 &sub_strict_overflow_p)) != 0)
/* Dividing by a negative constant swaps MIN and MAX. */
6465 if (tree_int_cst_sgn (c) < 0)
6466 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6467 if (sub_strict_overflow_p)
6468 *strict_overflow_p = true;
6469 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6470 fold_convert (ctype, t2));
6472 break;
6474 case LSHIFT_EXPR: case RSHIFT_EXPR:
6475 /* If the second operand is constant, this is a multiplication
6476 or floor division, by a power of two, so we can treat it that
6477 way unless the multiplier or divisor overflows. Signed
6478 left-shift overflow is implementation-defined rather than
6479 undefined in C90, so do not convert signed left shift into
6480 multiplication. */
6481 if (TREE_CODE (op1) == INTEGER_CST
6482 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6483 /* const_binop may not detect overflow correctly,
6484 so check for it explicitly here. */
6485 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6486 && TREE_INT_CST_HIGH (op1) == 0
6487 && 0 != (t1 = fold_convert (ctype,
6488 const_binop (LSHIFT_EXPR,
6489 size_one_node,
6490 op1, 0)))
6491 && !TREE_OVERFLOW (t1))
6492 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6493 ? MULT_EXPR : FLOOR_DIV_EXPR,
6494 ctype,
6495 fold_convert (ctype, op0),
6496 t1),
6497 c, code, wide_type, strict_overflow_p);
6498 break;
6500 case PLUS_EXPR: case MINUS_EXPR:
6501 /* See if we can eliminate the operation on both sides. If we can, we
6502 can return a new PLUS or MINUS. If we can't, the only remaining
6503 cases where we can do anything are if the second operand is a
6504 constant. */
6505 sub_strict_overflow_p = false;
6506 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6507 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6508 if (t1 != 0 && t2 != 0
6509 && (code == MULT_EXPR
6510 /* If not multiplication, we can only do this if both operands
6511 are divisible by c. */
6512 || (multiple_of_p (ctype, op0, c)
6513 && multiple_of_p (ctype, op1, c))))
6515 if (sub_strict_overflow_p)
6516 *strict_overflow_p = true;
6517 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6518 fold_convert (ctype, t2));
6521 /* If this was a subtraction, negate OP1 and set it to be an addition.
6522 This simplifies the logic below. */
6523 if (tcode == MINUS_EXPR)
6525 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6526 /* If OP1 was not easily negatable, the constant may be OP0. */
6527 if (TREE_CODE (op0) == INTEGER_CST)
6529 tree tem = op0;
6530 op0 = op1;
6531 op1 = tem;
6532 tem = t1;
6533 t1 = t2;
6534 t2 = tem;
6538 if (TREE_CODE (op1) != INTEGER_CST)
6539 break;
6541 /* If either OP1 or C are negative, this optimization is not safe for
6542 some of the division and remainder types while for others we need
6543 to change the code. */
6544 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6546 if (code == CEIL_DIV_EXPR)
6547 code = FLOOR_DIV_EXPR;
6548 else if (code == FLOOR_DIV_EXPR)
6549 code = CEIL_DIV_EXPR;
6550 else if (code != MULT_EXPR
6551 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6552 break;
6555 /* If it's a multiply or a division/modulus operation of a multiple
6556 of our constant, do the operation and verify it doesn't overflow. */
6557 if (code == MULT_EXPR
6558 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6560 op1 = const_binop (code, fold_convert (ctype, op1),
6561 fold_convert (ctype, c), 0);
6562 /* We allow the constant to overflow with wrapping semantics. */
6563 if (op1 == 0
6564 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6565 break;
6567 else
6568 break;
6570 /* If we have an unsigned type that is not a sizetype, we cannot widen
6571 the operation since it will change the result if the original
6572 computation overflowed. */
6573 if (TYPE_UNSIGNED (ctype)
6574 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6575 && ctype != type)
6576 break;
6578 /* If we were able to eliminate our operation from the first side,
6579 apply our operation to the second side and reform the PLUS. */
6580 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6581 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6583 /* The last case is if we are a multiply. In that case, we can
6584 apply the distributive law to commute the multiply and addition
6585 if the multiplication of the constants doesn't overflow. */
6586 if (code == MULT_EXPR)
6587 return fold_build2 (tcode, ctype,
6588 fold_build2 (code, ctype,
6589 fold_convert (ctype, op0),
6590 fold_convert (ctype, c)),
6591 op1);
6593 break;
6595 case MULT_EXPR:
6596 /* We have a special case here if we are doing something like
6597 (C * 8) % 4 since we know that's zero. */
6598 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6599 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6600 /* If the multiplication can overflow we cannot optimize this.
6601 ??? Until we can properly mark individual operations as
6602 not overflowing we need to treat sizetype special here as
6603 stor-layout relies on this optimization to make
6604 DECL_FIELD_BIT_OFFSET always a constant. */
6605 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6606 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6607 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6608 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6609 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6611 *strict_overflow_p = true;
6612 return omit_one_operand (type, integer_zero_node, op0);
6615 /* ... fall through ... */
6617 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6618 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6619 /* If we can extract our operation from the LHS, do so and return a
6620 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6621 do something only if the second operand is a constant. */
6622 if (same_p
6623 && (t1 = extract_muldiv (op0, c, code, wide_type,
6624 strict_overflow_p)) != 0)
6625 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6626 fold_convert (ctype, op1));
6627 else if (tcode == MULT_EXPR && code == MULT_EXPR
6628 && (t1 = extract_muldiv (op1, c, code, wide_type,
6629 strict_overflow_p)) != 0)
6630 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6631 fold_convert (ctype, t1));
6632 else if (TREE_CODE (op1) != INTEGER_CST)
6633 return 0;
6635 /* If these are the same operation types, we can associate them
6636 assuming no overflow. */
6637 if (tcode == code
6638 && 0 != (t1 = int_const_binop (MULT_EXPR,
6639 fold_convert (ctype, op1),
6640 fold_convert (ctype, c), 1))
6641 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6642 TREE_INT_CST_HIGH (t1),
6643 (TYPE_UNSIGNED (ctype)
6644 && tcode != MULT_EXPR) ? -1 : 1,
6645 TREE_OVERFLOW (t1)))
6646 && !TREE_OVERFLOW (t1))
6647 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6649 /* If these operations "cancel" each other, we have the main
6650 optimizations of this pass, which occur when either constant is a
6651 multiple of the other, in which case we replace this with either an
6652 operation or CODE or TCODE.
6654 If we have an unsigned type that is not a sizetype, we cannot do
6655 this since it will change the result if the original computation
6656 overflowed. */
6657 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6658 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6659 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6660 || (tcode == MULT_EXPR
6661 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6662 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6663 && code != MULT_EXPR)))
6665 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6667 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6668 *strict_overflow_p = true;
6669 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6670 fold_convert (ctype,
6671 const_binop (TRUNC_DIV_EXPR,
6672 op1, c, 0)));
6674 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6676 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6677 *strict_overflow_p = true;
6678 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6679 fold_convert (ctype,
6680 const_binop (TRUNC_DIV_EXPR,
6681 c, op1, 0)));
6684 break;
6686 default:
6687 break;
6690 return 0;
6693 /* Return a node which has the indicated constant VALUE (either 0 or
6694 1), and is of the indicated TYPE. */
6696 tree
6697 constant_boolean_node (int value, tree type)
6699 if (type == integer_type_node)
6700 return value ? integer_one_node : integer_zero_node;
6701 else if (type == boolean_type_node)
6702 return value ? boolean_true_node : boolean_false_node;
6703 else
6704 return build_int_cst (type, value);
6708 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6709 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6710 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6711 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6712 COND is the first argument to CODE; otherwise (as in the example
6713 given here), it is the second argument. TYPE is the type of the
6714 original expression. Return NULL_TREE if no simplification is
6715 possible. */
6717 static tree
6718 fold_binary_op_with_conditional_arg (location_t loc,
6719 enum tree_code code,
6720 tree type, tree op0, tree op1,
6721 tree cond, tree arg, int cond_first_p)
6723 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6724 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6725 tree test, true_value, false_value;
6726 tree lhs = NULL_TREE;
6727 tree rhs = NULL_TREE;
6729 /* This transformation is only worthwhile if we don't have to wrap
6730 arg in a SAVE_EXPR, and the operation can be simplified on at least
6731 one of the branches once it's pushed inside the COND_EXPR. */
6732 if (!TREE_CONSTANT (arg))
6733 return NULL_TREE;
6735 if (TREE_CODE (cond) == COND_EXPR)
6737 test = TREE_OPERAND (cond, 0);
6738 true_value = TREE_OPERAND (cond, 1);
6739 false_value = TREE_OPERAND (cond, 2);
6740 /* If this operand throws an expression, then it does not make
6741 sense to try to perform a logical or arithmetic operation
6742 involving it. */
6743 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6744 lhs = true_value;
6745 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6746 rhs = false_value;
6748 else
/* COND is a bare comparison; its arms are the constants 1 and 0. */
6750 tree testtype = TREE_TYPE (cond);
6751 test = cond;
6752 true_value = constant_boolean_node (true, testtype);
6753 false_value = constant_boolean_node (false, testtype);
6756 arg = fold_convert_loc (loc, arg_type, arg);
6757 if (lhs == 0)
6759 true_value = fold_convert_loc (loc, cond_type, true_value);
6760 if (cond_first_p)
6761 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6762 else
6763 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6765 if (rhs == 0)
6767 false_value = fold_convert_loc (loc, cond_type, false_value);
6768 if (cond_first_p)
6769 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6770 else
6771 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6774 test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6775 return fold_convert_loc (loc, type, test);
6779 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6781 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6782 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6783 ADDEND is the same as X.
6785 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6786 and finite. The problematic cases are when X is zero, and its mode
6787 has signed zeros. In the case of rounding towards -infinity,
6788 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6789 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6791 bool
6792 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6794 if (!real_zerop (addend))
6795 return false;
6797 /* Don't allow the fold with -fsignaling-nans. */
6798 if (HONOR_SNANS (TYPE_MODE (type)))
6799 return false;
6801 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6802 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6803 return true;
6805 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6806 if (TREE_CODE (addend) == REAL_CST
6807 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6808 negate = !negate;
6810 /* The mode has signed zeros, and we have to honor their sign.
6811 In this situation, there is only one case we can return true for.
6812 X - 0 is the same as X unless rounding towards -infinity is
6813 supported. */
6814 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6817 /* Subroutine of fold() that checks comparisons of built-in math
6818 functions against real constants.
6820 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6821 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6822 is the type of the result and ARG0 and ARG1 are the operands of the
6823 comparison. ARG1 must be a TREE_REAL_CST.
6825 The function returns the constant folded tree if a simplification
6826 can be made, and NULL_TREE otherwise. */
6828 static tree
6829 fold_mathfn_compare (location_t loc,
6830 enum built_in_function fcode, enum tree_code code,
6831 tree type, tree arg0, tree arg1)
6833 REAL_VALUE_TYPE c;
6835 if (BUILTIN_SQRT_P (fcode))
6837 tree arg = CALL_EXPR_ARG (arg0, 0);
6838 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6840 c = TREE_REAL_CST (arg1);
6841 if (REAL_VALUE_NEGATIVE (c))
6843 /* sqrt(x) ==/</<= y is always false, if y is negative. */
6844 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6845 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6847 /* sqrt(x) > y is always true, if y is negative and we
6848 don't care about NaNs, i.e. negative values of x. */
6849 if (code == NE_EXPR || !HONOR_NANS (mode))
6850 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6852 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6853 return fold_build2_loc (loc, GE_EXPR, type, arg,
6854 build_real (TREE_TYPE (arg), dconst0));
6856 else if (code == GT_EXPR || code == GE_EXPR)
6858 REAL_VALUE_TYPE c2;
6860 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6861 real_convert (&c2, mode, &c2);
6863 if (REAL_VALUE_ISINF (c2))
6865 /* sqrt(x) > y is x == +Inf, when y is very large. */
6866 if (HONOR_INFINITIES (mode))
6867 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6868 build_real (TREE_TYPE (arg), c2));
6870 /* sqrt(x) > y is always false, when y is very large
6871 and we don't care about infinities. */
6872 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6875 /* sqrt(x) > c is the same as x > c*c. */
6876 return fold_build2_loc (loc, code, type, arg,
6877 build_real (TREE_TYPE (arg), c2));
6879 else if (code == LT_EXPR || code == LE_EXPR)
6881 REAL_VALUE_TYPE c2;
6883 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6884 real_convert (&c2, mode, &c2);
6886 if (REAL_VALUE_ISINF (c2))
6888 /* sqrt(x) < y is always true, when y is a very large
6889 value and we don't care about NaNs or Infinities. */
6890 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6891 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6893 /* sqrt(x) < y is x != +Inf when y is very large and we
6894 don't care about NaNs. */
6895 if (! HONOR_NANS (mode))
6896 return fold_build2_loc (loc, NE_EXPR, type, arg,
6897 build_real (TREE_TYPE (arg), c2));
6899 /* sqrt(x) < y is x >= 0 when y is very large and we
6900 don't care about Infinities. */
6901 if (! HONOR_INFINITIES (mode))
6902 return fold_build2_loc (loc, GE_EXPR, type, arg,
6903 build_real (TREE_TYPE (arg), dconst0));
6905 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6906 if (lang_hooks.decls.global_bindings_p () != 0
6907 || CONTAINS_PLACEHOLDER_P (arg))
6908 return NULL_TREE;
6910 arg = save_expr (arg);
6911 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6912 fold_build2_loc (loc, GE_EXPR, type, arg,
6913 build_real (TREE_TYPE (arg),
6914 dconst0)),
6915 fold_build2_loc (loc, NE_EXPR, type, arg,
6916 build_real (TREE_TYPE (arg),
6917 c2)));
6920 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6921 if (! HONOR_NANS (mode))
6922 return fold_build2_loc (loc, code, type, arg,
6923 build_real (TREE_TYPE (arg), c2));
6925 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6926 if (lang_hooks.decls.global_bindings_p () == 0
6927 && ! CONTAINS_PLACEHOLDER_P (arg))
6929 arg = save_expr (arg);
6930 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6931 fold_build2_loc (loc, GE_EXPR, type, arg,
6932 build_real (TREE_TYPE (arg),
6933 dconst0)),
6934 fold_build2_loc (loc, code, type, arg,
6935 build_real (TREE_TYPE (arg),
6936 c2)));
6941 return NULL_TREE;
6944 /* Subroutine of fold() that optimizes comparisons against Infinities,
6945 either +Inf or -Inf.
6947 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6948 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6949 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6951 The function returns the constant folded tree if a simplification
6952 can be made, and NULL_TREE otherwise. */
6954 static tree
6955 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6956 tree arg0, tree arg1)
6958 enum machine_mode mode;
6959 REAL_VALUE_TYPE max;
6960 tree temp;
6961 bool neg;
6963 mode = TYPE_MODE (TREE_TYPE (arg0));
6965 /* For negative infinity swap the sense of the comparison. */
6966 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6967 if (neg)
6968 code = swap_tree_comparison (code);
6970 switch (code)
6972 case GT_EXPR:
6973 /* x > +Inf is always false, if we ignore sNaNs. */
6974 if (HONOR_SNANS (mode))
6975 return NULL_TREE;
6976 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6978 case LE_EXPR:
6979 /* x <= +Inf is always true, if we don't care about NaNs. */
6980 if (! HONOR_NANS (mode))
6981 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6983 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6984 if (lang_hooks.decls.global_bindings_p () == 0
6985 && ! CONTAINS_PLACEHOLDER_P (arg0))
6987 arg0 = save_expr (arg0);
6988 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6990 break;
6992 case EQ_EXPR:
6993 case GE_EXPR:
6994 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6995 real_maxval (&max, neg, mode);
6996 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6997 arg0, build_real (TREE_TYPE (arg0), max));
6999 case LT_EXPR:
7000 /* x < +Inf is always equal to x <= DBL_MAX. */
7001 real_maxval (&max, neg, mode);
7002 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7003 arg0, build_real (TREE_TYPE (arg0), max));
7005 case NE_EXPR:
7006 /* x != +Inf is always equal to !(x > DBL_MAX). */
7007 real_maxval (&max, neg, mode);
7008 if (! HONOR_NANS (mode))
7009 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7010 arg0, build_real (TREE_TYPE (arg0), max));
7012 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7013 arg0, build_real (TREE_TYPE (arg0), max));
7014 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
7016 default:
7017 break;
7020 return NULL_TREE;
7023 /* Subroutine of fold() that optimizes comparisons of a division by
7024 a nonzero integer constant against an integer constant, i.e.
7025 X/C1 op C2.
7027 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7028 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
7029 are the operands of the comparison. ARG1 must be an INTEGER_CST.
7031 The function returns the constant folded tree if a simplification
7032 can be made, and NULL_TREE otherwise. */
7034 static tree
7035 fold_div_compare (location_t loc,
7036 enum tree_code code, tree type, tree arg0, tree arg1)
7038 tree prod, tmp, hi, lo;
7039 tree arg00 = TREE_OPERAND (arg0, 0);
7040 tree arg01 = TREE_OPERAND (arg0, 1);
7041 unsigned HOST_WIDE_INT lpart;
7042 HOST_WIDE_INT hpart;
7043 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
7044 bool neg_overflow;
7045 int overflow;
7047 /* We have to do this the hard way to detect unsigned overflow.
7048 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
7049 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
7050 TREE_INT_CST_HIGH (arg01),
7051 TREE_INT_CST_LOW (arg1),
7052 TREE_INT_CST_HIGH (arg1),
7053 &lpart, &hpart, unsigned_p);
7054 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7055 -1, overflow);
7056 neg_overflow = false;
7058 if (unsigned_p)
7060 tmp = int_const_binop (MINUS_EXPR, arg01,
7061 build_int_cst (TREE_TYPE (arg01), 1), 0);
7062 lo = prod;
7064 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
7065 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
7066 TREE_INT_CST_HIGH (prod),
7067 TREE_INT_CST_LOW (tmp),
7068 TREE_INT_CST_HIGH (tmp),
7069 &lpart, &hpart, unsigned_p);
7070 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7071 -1, overflow | TREE_OVERFLOW (prod));
7073 else if (tree_int_cst_sgn (arg01) >= 0)
7075 tmp = int_const_binop (MINUS_EXPR, arg01,
7076 build_int_cst (TREE_TYPE (arg01), 1), 0);
7077 switch (tree_int_cst_sgn (arg1))
7079 case -1:
7080 neg_overflow = true;
7081 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7082 hi = prod;
7083 break;
7085 case 0:
7086 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
7087 hi = tmp;
7088 break;
7090 case 1:
7091 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7092 lo = prod;
7093 break;
7095 default:
7096 gcc_unreachable ();
7099 else
7101 /* A negative divisor reverses the relational operators. */
7102 code = swap_tree_comparison (code);
7104 tmp = int_const_binop (PLUS_EXPR, arg01,
7105 build_int_cst (TREE_TYPE (arg01), 1), 0);
7106 switch (tree_int_cst_sgn (arg1))
7108 case -1:
7109 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7110 lo = prod;
7111 break;
7113 case 0:
7114 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7115 lo = tmp;
7116 break;
7118 case 1:
7119 neg_overflow = true;
7120 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7121 hi = prod;
7122 break;
7124 default:
7125 gcc_unreachable ();
/* At this point [LO, HI] is the range of ARG00 values for which
X/C1 == C2; TREE_OVERFLOW on LO or HI flags a bound that fell
outside TYPE, which the cases below use to drop that bound. */
7129 switch (code)
7131 case EQ_EXPR:
7132 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7133 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7134 if (TREE_OVERFLOW (hi))
7135 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7136 if (TREE_OVERFLOW (lo))
7137 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7138 return build_range_check (loc, type, arg00, 1, lo, hi);
7140 case NE_EXPR:
7141 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7142 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7143 if (TREE_OVERFLOW (hi))
7144 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7145 if (TREE_OVERFLOW (lo))
7146 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7147 return build_range_check (loc, type, arg00, 0, lo, hi);
7149 case LT_EXPR:
7150 if (TREE_OVERFLOW (lo))
7152 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7153 return omit_one_operand_loc (loc, type, tmp, arg00);
7155 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7157 case LE_EXPR:
7158 if (TREE_OVERFLOW (hi))
7160 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7161 return omit_one_operand_loc (loc, type, tmp, arg00);
7163 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7165 case GT_EXPR:
7166 if (TREE_OVERFLOW (hi))
7168 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7169 return omit_one_operand_loc (loc, type, tmp, arg00);
7171 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7173 case GE_EXPR:
7174 if (TREE_OVERFLOW (lo))
7176 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7177 return omit_one_operand_loc (loc, type, tmp, arg00);
7179 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7181 default:
7182 break;
7185 return NULL_TREE;
7189 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7190 equality/inequality test, then return a simplified form of the test
7191 using a sign test. Otherwise return NULL. TYPE is the desired
7192 result type. */
7194 static tree
7195 fold_single_bit_test_into_sign_test (location_t loc,
7196 enum tree_code code, tree arg0, tree arg1,
7197 tree result_type)
7199 /* If this is testing a single bit, we can optimize the test. */
7200 if ((code == NE_EXPR || code == EQ_EXPR)
7201 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7202 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7204 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7205 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7206 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7208 if (arg00 != NULL_TREE
7209 /* This is only a win if casting to a signed type is cheap,
7210 i.e. when arg00's type is not a partial mode. */
7211 && TYPE_PRECISION (TREE_TYPE (arg00))
7212 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7214 tree stype = signed_type_for (TREE_TYPE (arg00));
7215 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7216 result_type,
7217 fold_convert_loc (loc, stype, arg00),
7218 build_int_cst (stype, 0));
7222 return NULL_TREE;
7225 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7226 equality/inequality test, then return a simplified form of
7227 the test using shifts and logical operations. Otherwise return
7228 NULL. TYPE is the desired result type. */
7230 tree
7231 fold_single_bit_test (location_t loc, enum tree_code code,
7232 tree arg0, tree arg1, tree result_type)
7234 /* If this is testing a single bit, we can optimize the test. */
7235 if ((code == NE_EXPR || code == EQ_EXPR)
7236 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7237 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7239 tree inner = TREE_OPERAND (arg0, 0);
7240 tree type = TREE_TYPE (arg0);
7241 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7242 enum machine_mode operand_mode = TYPE_MODE (type);
7243 int ops_unsigned;
7244 tree signed_type, unsigned_type, intermediate_type;
7245 tree tem, one;
7247 /* First, see if we can fold the single bit test into a sign-bit
7248 test. */
7249 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7250 result_type);
7251 if (tem)
7252 return tem;
7254 /* Otherwise we have (A & C) != 0 where C is a single bit,
7255 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7256 Similarly for (A & C) == 0. */
7258 /* If INNER is a right shift of a constant and it plus BITNUM does
7259 not overflow, adjust BITNUM and INNER. */
7260 if (TREE_CODE (inner) == RSHIFT_EXPR
7261 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7262 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7263 && bitnum < TYPE_PRECISION (type)
7264 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7265 bitnum - TYPE_PRECISION (type)))
7267 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7268 inner = TREE_OPERAND (inner, 0);
7271 /* If we are going to be able to omit the AND below, we must do our
7272 operations as unsigned. If we must use the AND, we have a choice.
7273 Normally unsigned is faster, but for some machines signed is. */
7274 #ifdef LOAD_EXTEND_OP
7275 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7276 && !flag_syntax_only) ? 0 : 1;
7277 #else
7278 ops_unsigned = 1;
7279 #endif
7281 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7282 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7283 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7284 inner = fold_convert_loc (loc, intermediate_type, inner);
7286 if (bitnum != 0)
7287 inner = build2 (RSHIFT_EXPR, intermediate_type,
7288 inner, size_int (bitnum));
7290 one = build_int_cst (intermediate_type, 1);
7292 if (code == EQ_EXPR)
7293 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7295 /* Put the AND last so it can combine with more things. */
7296 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7298 /* Make sure to return the proper type. */
7299 inner = fold_convert_loc (loc, result_type, inner);
7301 return inner;
7303 return NULL_TREE;
7306 /* Check whether we are allowed to reorder operands arg0 and arg1,
7307 such that the evaluation of arg1 occurs before arg0. */
7309 static bool
7310 reorder_operands_p (const_tree arg0, const_tree arg1)
7312 if (! flag_evaluation_order)
7313 return true;
7314 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7315 return true;
7316 return ! TREE_SIDE_EFFECTS (arg0)
7317 && ! TREE_SIDE_EFFECTS (arg1);
7320 /* Test whether it is preferable two swap two operands, ARG0 and
7321 ARG1, for example because ARG0 is an integer constant and ARG1
7322 isn't. If REORDER is true, only recommend swapping if we can
7323 evaluate the operands in reverse order. */
7325 bool
7326 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7328 STRIP_SIGN_NOPS (arg0);
7329 STRIP_SIGN_NOPS (arg1);
7331 if (TREE_CODE (arg1) == INTEGER_CST)
7332 return 0;
7333 if (TREE_CODE (arg0) == INTEGER_CST)
7334 return 1;
7336 if (TREE_CODE (arg1) == REAL_CST)
7337 return 0;
7338 if (TREE_CODE (arg0) == REAL_CST)
7339 return 1;
7341 if (TREE_CODE (arg1) == FIXED_CST)
7342 return 0;
7343 if (TREE_CODE (arg0) == FIXED_CST)
7344 return 1;
7346 if (TREE_CODE (arg1) == COMPLEX_CST)
7347 return 0;
7348 if (TREE_CODE (arg0) == COMPLEX_CST)
7349 return 1;
7351 if (TREE_CONSTANT (arg1))
7352 return 0;
7353 if (TREE_CONSTANT (arg0))
7354 return 1;
7356 if (optimize_function_for_size_p (cfun))
7357 return 0;
7359 if (reorder && flag_evaluation_order
7360 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7361 return 0;
7363 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7364 for commutative and comparison operators. Ensuring a canonical
7365 form allows the optimizers to find additional redundancies without
7366 having to explicitly check for both orderings. */
7367 if (TREE_CODE (arg0) == SSA_NAME
7368 && TREE_CODE (arg1) == SSA_NAME
7369 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7370 return 1;
7372 /* Put SSA_NAMEs last. */
7373 if (TREE_CODE (arg1) == SSA_NAME)
7374 return 0;
7375 if (TREE_CODE (arg0) == SSA_NAME)
7376 return 1;
7378 /* Put variables last. */
7379 if (DECL_P (arg1))
7380 return 0;
7381 if (DECL_P (arg0))
7382 return 1;
7384 return 0;
7387 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7388 ARG0 is extended to a wider type. */
7390 static tree
7391 fold_widened_comparison (location_t loc, enum tree_code code,
7392 tree type, tree arg0, tree arg1)
7394 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7395 tree arg1_unw;
7396 tree shorter_type, outer_type;
7397 tree min, max;
7398 bool above, below;
7400 if (arg0_unw == arg0)
7401 return NULL_TREE;
7402 shorter_type = TREE_TYPE (arg0_unw);
7404 #ifdef HAVE_canonicalize_funcptr_for_compare
7405 /* Disable this optimization if we're casting a function pointer
7406 type on targets that require function pointer canonicalization. */
7407 if (HAVE_canonicalize_funcptr_for_compare
7408 && TREE_CODE (shorter_type) == POINTER_TYPE
7409 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7410 return NULL_TREE;
7411 #endif
7413 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7414 return NULL_TREE;
7416 arg1_unw = get_unwidened (arg1, NULL_TREE);
7418 /* If possible, express the comparison in the shorter mode. */
7419 if ((code == EQ_EXPR || code == NE_EXPR
7420 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7421 && (TREE_TYPE (arg1_unw) == shorter_type
7422 || ((TYPE_PRECISION (shorter_type)
7423 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7424 && (TYPE_UNSIGNED (shorter_type)
7425 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7426 || (TREE_CODE (arg1_unw) == INTEGER_CST
7427 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7428 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7429 && int_fits_type_p (arg1_unw, shorter_type))))
7430 return fold_build2_loc (loc, code, type, arg0_unw,
7431 fold_convert_loc (loc, shorter_type, arg1_unw));
7433 if (TREE_CODE (arg1_unw) != INTEGER_CST
7434 || TREE_CODE (shorter_type) != INTEGER_TYPE
7435 || !int_fits_type_p (arg1_unw, shorter_type))
7436 return NULL_TREE;
7438 /* If we are comparing with the integer that does not fit into the range
7439 of the shorter type, the result is known. */
7440 outer_type = TREE_TYPE (arg1_unw);
7441 min = lower_bound_in_type (outer_type, shorter_type);
7442 max = upper_bound_in_type (outer_type, shorter_type);
7444 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7445 max, arg1_unw));
7446 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7447 arg1_unw, min));
7449 switch (code)
7451 case EQ_EXPR:
7452 if (above || below)
7453 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7454 break;
7456 case NE_EXPR:
7457 if (above || below)
7458 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7459 break;
7461 case LT_EXPR:
7462 case LE_EXPR:
7463 if (above)
7464 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7465 else if (below)
7466 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7468 case GT_EXPR:
7469 case GE_EXPR:
7470 if (above)
7471 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7472 else if (below)
7473 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7475 default:
7476 break;
7479 return NULL_TREE;
7482 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7483 ARG0 just the signedness is changed. */
7485 static tree
7486 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7487 tree arg0, tree arg1)
7489 tree arg0_inner;
7490 tree inner_type, outer_type;
7492 if (!CONVERT_EXPR_P (arg0))
7493 return NULL_TREE;
7495 outer_type = TREE_TYPE (arg0);
7496 arg0_inner = TREE_OPERAND (arg0, 0);
7497 inner_type = TREE_TYPE (arg0_inner);
7499 #ifdef HAVE_canonicalize_funcptr_for_compare
7500 /* Disable this optimization if we're casting a function pointer
7501 type on targets that require function pointer canonicalization. */
7502 if (HAVE_canonicalize_funcptr_for_compare
7503 && TREE_CODE (inner_type) == POINTER_TYPE
7504 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7505 return NULL_TREE;
7506 #endif
7508 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7509 return NULL_TREE;
7511 if (TREE_CODE (arg1) != INTEGER_CST
7512 && !(CONVERT_EXPR_P (arg1)
7513 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7514 return NULL_TREE;
7516 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7517 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7518 && code != NE_EXPR
7519 && code != EQ_EXPR)
7520 return NULL_TREE;
7522 if (TREE_CODE (arg1) == INTEGER_CST)
7523 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7524 TREE_INT_CST_HIGH (arg1), 0,
7525 TREE_OVERFLOW (arg1));
7526 else
7527 arg1 = fold_convert_loc (loc, inner_type, arg1);
7529 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7532 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7533 step of the array. Reconstructs s and delta in the case of s *
7534 delta being an integer constant (and thus already folded). ADDR is
7535 the address. MULT is the multiplicative expression. If the
7536 function succeeds, the new address expression is returned.
7537 Otherwise NULL_TREE is returned. LOC is the location of the
7538 resulting expression. */
7540 static tree
7541 try_move_mult_to_index (location_t loc, tree addr, tree op1)
7543 tree s, delta, step;
7544 tree ref = TREE_OPERAND (addr, 0), pref;
7545 tree ret, pos;
7546 tree itype;
7547 bool mdim = false;
7549 /* Strip the nops that might be added when converting op1 to sizetype. */
7550 STRIP_NOPS (op1);
7552 /* Canonicalize op1 into a possibly non-constant delta
7553 and an INTEGER_CST s. */
7554 if (TREE_CODE (op1) == MULT_EXPR)
7556 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7558 STRIP_NOPS (arg0);
7559 STRIP_NOPS (arg1);
7561 if (TREE_CODE (arg0) == INTEGER_CST)
7563 s = arg0;
7564 delta = arg1;
7566 else if (TREE_CODE (arg1) == INTEGER_CST)
7568 s = arg1;
7569 delta = arg0;
7571 else
7572 return NULL_TREE;
7574 else if (TREE_CODE (op1) == INTEGER_CST)
7576 delta = op1;
7577 s = NULL_TREE;
7579 else
7581 /* Simulate we are delta * 1. */
7582 delta = op1;
7583 s = integer_one_node;
7586 for (;; ref = TREE_OPERAND (ref, 0))
7588 if (TREE_CODE (ref) == ARRAY_REF)
7590 tree domain;
7592 /* Remember if this was a multi-dimensional array. */
7593 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7594 mdim = true;
7596 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7597 if (! domain)
7598 continue;
7599 itype = TREE_TYPE (domain);
7601 step = array_ref_element_size (ref);
7602 if (TREE_CODE (step) != INTEGER_CST)
7603 continue;
7605 if (s)
7607 if (! tree_int_cst_equal (step, s))
7608 continue;
7610 else
7612 /* Try if delta is a multiple of step. */
7613 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7614 if (! tmp)
7615 continue;
7616 delta = tmp;
7619 /* Only fold here if we can verify we do not overflow one
7620 dimension of a multi-dimensional array. */
7621 if (mdim)
7623 tree tmp;
7625 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7626 || !TYPE_MAX_VALUE (domain)
7627 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7628 continue;
7630 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7631 fold_convert_loc (loc, itype,
7632 TREE_OPERAND (ref, 1)),
7633 fold_convert_loc (loc, itype, delta));
7634 if (!tmp
7635 || TREE_CODE (tmp) != INTEGER_CST
7636 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7637 continue;
7640 break;
7642 else
7643 mdim = false;
7645 if (!handled_component_p (ref))
7646 return NULL_TREE;
7649 /* We found the suitable array reference. So copy everything up to it,
7650 and replace the index. */
7652 pref = TREE_OPERAND (addr, 0);
7653 ret = copy_node (pref);
7654 SET_EXPR_LOCATION (ret, loc);
7655 pos = ret;
7657 while (pref != ref)
7659 pref = TREE_OPERAND (pref, 0);
7660 TREE_OPERAND (pos, 0) = copy_node (pref);
7661 pos = TREE_OPERAND (pos, 0);
7664 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7665 fold_convert_loc (loc, itype,
7666 TREE_OPERAND (pos, 1)),
7667 fold_convert_loc (loc, itype, delta));
7669 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7673 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7674 means A >= Y && A != MAX, but in this case we know that
7675 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7677 static tree
7678 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7680 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7682 if (TREE_CODE (bound) == LT_EXPR)
7683 a = TREE_OPERAND (bound, 0);
7684 else if (TREE_CODE (bound) == GT_EXPR)
7685 a = TREE_OPERAND (bound, 1);
7686 else
7687 return NULL_TREE;
7689 typea = TREE_TYPE (a);
7690 if (!INTEGRAL_TYPE_P (typea)
7691 && !POINTER_TYPE_P (typea))
7692 return NULL_TREE;
7694 if (TREE_CODE (ineq) == LT_EXPR)
7696 a1 = TREE_OPERAND (ineq, 1);
7697 y = TREE_OPERAND (ineq, 0);
7699 else if (TREE_CODE (ineq) == GT_EXPR)
7701 a1 = TREE_OPERAND (ineq, 0);
7702 y = TREE_OPERAND (ineq, 1);
7704 else
7705 return NULL_TREE;
7707 if (TREE_TYPE (a1) != typea)
7708 return NULL_TREE;
7710 if (POINTER_TYPE_P (typea))
7712 /* Convert the pointer types into integer before taking the difference. */
7713 tree ta = fold_convert_loc (loc, ssizetype, a);
7714 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7715 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7717 else
7718 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7720 if (!diff || !integer_onep (diff))
7721 return NULL_TREE;
7723 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7726 /* Fold a sum or difference of at least one multiplication.
7727 Returns the folded tree or NULL if no simplification could be made. */
7729 static tree
7730 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7731 tree arg0, tree arg1)
7733 tree arg00, arg01, arg10, arg11;
7734 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7736 /* (A * C) +- (B * C) -> (A+-B) * C.
7737 (A * C) +- A -> A * (C+-1).
7738 We are most concerned about the case where C is a constant,
7739 but other combinations show up during loop reduction. Since
7740 it is not difficult, try all four possibilities. */
7742 if (TREE_CODE (arg0) == MULT_EXPR)
7744 arg00 = TREE_OPERAND (arg0, 0);
7745 arg01 = TREE_OPERAND (arg0, 1);
7747 else if (TREE_CODE (arg0) == INTEGER_CST)
7749 arg00 = build_one_cst (type);
7750 arg01 = arg0;
7752 else
7754 /* We cannot generate constant 1 for fract. */
7755 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7756 return NULL_TREE;
7757 arg00 = arg0;
7758 arg01 = build_one_cst (type);
7760 if (TREE_CODE (arg1) == MULT_EXPR)
7762 arg10 = TREE_OPERAND (arg1, 0);
7763 arg11 = TREE_OPERAND (arg1, 1);
7765 else if (TREE_CODE (arg1) == INTEGER_CST)
7767 arg10 = build_one_cst (type);
7768 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7769 the purpose of this canonicalization. */
7770 if (TREE_INT_CST_HIGH (arg1) == -1
7771 && negate_expr_p (arg1)
7772 && code == PLUS_EXPR)
7774 arg11 = negate_expr (arg1);
7775 code = MINUS_EXPR;
7777 else
7778 arg11 = arg1;
7780 else
7782 /* We cannot generate constant 1 for fract. */
7783 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7784 return NULL_TREE;
7785 arg10 = arg1;
7786 arg11 = build_one_cst (type);
7788 same = NULL_TREE;
7790 if (operand_equal_p (arg01, arg11, 0))
7791 same = arg01, alt0 = arg00, alt1 = arg10;
7792 else if (operand_equal_p (arg00, arg10, 0))
7793 same = arg00, alt0 = arg01, alt1 = arg11;
7794 else if (operand_equal_p (arg00, arg11, 0))
7795 same = arg00, alt0 = arg01, alt1 = arg10;
7796 else if (operand_equal_p (arg01, arg10, 0))
7797 same = arg01, alt0 = arg00, alt1 = arg11;
7799 /* No identical multiplicands; see if we can find a common
7800 power-of-two factor in non-power-of-two multiplies. This
7801 can help in multi-dimensional array access. */
7802 else if (host_integerp (arg01, 0)
7803 && host_integerp (arg11, 0))
7805 HOST_WIDE_INT int01, int11, tmp;
7806 bool swap = false;
7807 tree maybe_same;
7808 int01 = TREE_INT_CST_LOW (arg01);
7809 int11 = TREE_INT_CST_LOW (arg11);
7811 /* Move min of absolute values to int11. */
7812 if ((int01 >= 0 ? int01 : -int01)
7813 < (int11 >= 0 ? int11 : -int11))
7815 tmp = int01, int01 = int11, int11 = tmp;
7816 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7817 maybe_same = arg01;
7818 swap = true;
7820 else
7821 maybe_same = arg11;
7823 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7824 /* The remainder should not be a constant, otherwise we
7825 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7826 increased the number of multiplications necessary. */
7827 && TREE_CODE (arg10) != INTEGER_CST)
7829 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7830 build_int_cst (TREE_TYPE (arg00),
7831 int01 / int11));
7832 alt1 = arg10;
7833 same = maybe_same;
7834 if (swap)
7835 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7839 if (same)
7840 return fold_build2_loc (loc, MULT_EXPR, type,
7841 fold_build2_loc (loc, code, type,
7842 fold_convert_loc (loc, type, alt0),
7843 fold_convert_loc (loc, type, alt1)),
7844 fold_convert_loc (loc, type, same));
7846 return NULL_TREE;
7849 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7850 specified by EXPR into the buffer PTR of length LEN bytes.
7851 Return the number of bytes placed in the buffer, or zero
7852 upon failure. */
7854 static int
7855 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7857 tree type = TREE_TYPE (expr);
7858 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7859 int byte, offset, word, words;
7860 unsigned char value;
7862 if (total_bytes > len)
7863 return 0;
7864 words = total_bytes / UNITS_PER_WORD;
7866 for (byte = 0; byte < total_bytes; byte++)
7868 int bitpos = byte * BITS_PER_UNIT;
7869 if (bitpos < HOST_BITS_PER_WIDE_INT)
7870 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7871 else
7872 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7873 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7875 if (total_bytes > UNITS_PER_WORD)
7877 word = byte / UNITS_PER_WORD;
7878 if (WORDS_BIG_ENDIAN)
7879 word = (words - 1) - word;
7880 offset = word * UNITS_PER_WORD;
7881 if (BYTES_BIG_ENDIAN)
7882 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7883 else
7884 offset += byte % UNITS_PER_WORD;
7886 else
7887 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7888 ptr[offset] = value;
7890 return total_bytes;
7894 /* Subroutine of native_encode_expr. Encode the REAL_CST
7895 specified by EXPR into the buffer PTR of length LEN bytes.
7896 Return the number of bytes placed in the buffer, or zero
7897 upon failure. */
7899 static int
7900 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7902 tree type = TREE_TYPE (expr);
7903 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7904 int byte, offset, word, words, bitpos;
7905 unsigned char value;
7907 /* There are always 32 bits in each long, no matter the size of
7908 the hosts long. We handle floating point representations with
7909 up to 192 bits. */
7910 long tmp[6];
7912 if (total_bytes > len)
7913 return 0;
7914 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7916 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7918 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7919 bitpos += BITS_PER_UNIT)
7921 byte = (bitpos / BITS_PER_UNIT) & 3;
7922 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7924 if (UNITS_PER_WORD < 4)
7926 word = byte / UNITS_PER_WORD;
7927 if (WORDS_BIG_ENDIAN)
7928 word = (words - 1) - word;
7929 offset = word * UNITS_PER_WORD;
7930 if (BYTES_BIG_ENDIAN)
7931 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7932 else
7933 offset += byte % UNITS_PER_WORD;
7935 else
7936 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7937 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7939 return total_bytes;
7942 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7943 specified by EXPR into the buffer PTR of length LEN bytes.
7944 Return the number of bytes placed in the buffer, or zero
7945 upon failure. */
7947 static int
7948 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7950 int rsize, isize;
7951 tree part;
7953 part = TREE_REALPART (expr);
7954 rsize = native_encode_expr (part, ptr, len);
7955 if (rsize == 0)
7956 return 0;
7957 part = TREE_IMAGPART (expr);
7958 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7959 if (isize != rsize)
7960 return 0;
7961 return rsize + isize;
7965 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7966 specified by EXPR into the buffer PTR of length LEN bytes.
7967 Return the number of bytes placed in the buffer, or zero
7968 upon failure. */
7970 static int
7971 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7973 int i, size, offset, count;
7974 tree itype, elem, elements;
7976 offset = 0;
7977 elements = TREE_VECTOR_CST_ELTS (expr);
7978 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7979 itype = TREE_TYPE (TREE_TYPE (expr));
7980 size = GET_MODE_SIZE (TYPE_MODE (itype));
7981 for (i = 0; i < count; i++)
7983 if (elements)
7985 elem = TREE_VALUE (elements);
7986 elements = TREE_CHAIN (elements);
7988 else
7989 elem = NULL_TREE;
7991 if (elem)
7993 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7994 return 0;
7996 else
7998 if (offset + size > len)
7999 return 0;
8000 memset (ptr+offset, 0, size);
8002 offset += size;
8004 return offset;
8008 /* Subroutine of native_encode_expr. Encode the STRING_CST
8009 specified by EXPR into the buffer PTR of length LEN bytes.
8010 Return the number of bytes placed in the buffer, or zero
8011 upon failure. */
8013 static int
8014 native_encode_string (const_tree expr, unsigned char *ptr, int len)
8016 tree type = TREE_TYPE (expr);
8017 HOST_WIDE_INT total_bytes;
8019 if (TREE_CODE (type) != ARRAY_TYPE
8020 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8021 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
8022 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
8023 return 0;
8024 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
8025 if (total_bytes > len)
8026 return 0;
8027 if (TREE_STRING_LENGTH (expr) < total_bytes)
8029 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
8030 memset (ptr + TREE_STRING_LENGTH (expr), 0,
8031 total_bytes - TREE_STRING_LENGTH (expr));
8033 else
8034 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
8035 return total_bytes;
8039 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
8040 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
8041 buffer PTR of length LEN bytes. Return the number of bytes
8042 placed in the buffer, or zero upon failure. */
8045 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
8047 switch (TREE_CODE (expr))
8049 case INTEGER_CST:
8050 return native_encode_int (expr, ptr, len);
8052 case REAL_CST:
8053 return native_encode_real (expr, ptr, len);
8055 case COMPLEX_CST:
8056 return native_encode_complex (expr, ptr, len);
8058 case VECTOR_CST:
8059 return native_encode_vector (expr, ptr, len);
8061 case STRING_CST:
8062 return native_encode_string (expr, ptr, len);
8064 default:
8065 return 0;
8070 /* Subroutine of native_interpret_expr. Interpret the contents of
8071 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8072 If the buffer cannot be interpreted, return NULL_TREE. */
8074 static tree
8075 native_interpret_int (tree type, const unsigned char *ptr, int len)
8077 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8078 int byte, offset, word, words;
8079 unsigned char value;
8080 unsigned int HOST_WIDE_INT lo = 0;
8081 HOST_WIDE_INT hi = 0;
8083 if (total_bytes > len)
8084 return NULL_TREE;
8085 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
8086 return NULL_TREE;
8087 words = total_bytes / UNITS_PER_WORD;
8089 for (byte = 0; byte < total_bytes; byte++)
8091 int bitpos = byte * BITS_PER_UNIT;
8092 if (total_bytes > UNITS_PER_WORD)
8094 word = byte / UNITS_PER_WORD;
8095 if (WORDS_BIG_ENDIAN)
8096 word = (words - 1) - word;
8097 offset = word * UNITS_PER_WORD;
8098 if (BYTES_BIG_ENDIAN)
8099 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8100 else
8101 offset += byte % UNITS_PER_WORD;
8103 else
8104 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8105 value = ptr[offset];
8107 if (bitpos < HOST_BITS_PER_WIDE_INT)
8108 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
8109 else
8110 hi |= (unsigned HOST_WIDE_INT) value
8111 << (bitpos - HOST_BITS_PER_WIDE_INT);
8114 return build_int_cst_wide_type (type, lo, hi);
8118 /* Subroutine of native_interpret_expr. Interpret the contents of
8119 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8120 If the buffer cannot be interpreted, return NULL_TREE. */
8122 static tree
8123 native_interpret_real (tree type, const unsigned char *ptr, int len)
8125 enum machine_mode mode = TYPE_MODE (type);
8126 int total_bytes = GET_MODE_SIZE (mode);
8127 int byte, offset, word, words, bitpos;
8128 unsigned char value;
8129 /* There are always 32 bits in each long, no matter the size of
8130 the hosts long. We handle floating point representations with
8131 up to 192 bits. */
8132 REAL_VALUE_TYPE r;
8133 long tmp[6];
8135 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8136 if (total_bytes > len || total_bytes > 24)
8137 return NULL_TREE;
8138 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8140 memset (tmp, 0, sizeof (tmp));
8141 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8142 bitpos += BITS_PER_UNIT)
8144 byte = (bitpos / BITS_PER_UNIT) & 3;
8145 if (UNITS_PER_WORD < 4)
8147 word = byte / UNITS_PER_WORD;
8148 if (WORDS_BIG_ENDIAN)
8149 word = (words - 1) - word;
8150 offset = word * UNITS_PER_WORD;
8151 if (BYTES_BIG_ENDIAN)
8152 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8153 else
8154 offset += byte % UNITS_PER_WORD;
8156 else
8157 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
8158 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8160 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8163 real_from_target (&r, tmp, mode);
8164 return build_real (type, r);
8168 /* Subroutine of native_interpret_expr. Interpret the contents of
8169 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8170 If the buffer cannot be interpreted, return NULL_TREE. */
8172 static tree
8173 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8175 tree etype, rpart, ipart;
8176 int size;
8178 etype = TREE_TYPE (type);
8179 size = GET_MODE_SIZE (TYPE_MODE (etype));
8180 if (size * 2 > len)
8181 return NULL_TREE;
8182 rpart = native_interpret_expr (etype, ptr, size);
8183 if (!rpart)
8184 return NULL_TREE;
8185 ipart = native_interpret_expr (etype, ptr+size, size);
8186 if (!ipart)
8187 return NULL_TREE;
8188 return build_complex (type, rpart, ipart);
8192 /* Subroutine of native_interpret_expr. Interpret the contents of
8193 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8194 If the buffer cannot be interpreted, return NULL_TREE. */
8196 static tree
8197 native_interpret_vector (tree type, const unsigned char *ptr, int len)
8199 tree etype, elem, elements;
8200 int i, size, count;
8202 etype = TREE_TYPE (type);
8203 size = GET_MODE_SIZE (TYPE_MODE (etype));
8204 count = TYPE_VECTOR_SUBPARTS (type);
8205 if (size * count > len)
8206 return NULL_TREE;
8208 elements = NULL_TREE;
8209 for (i = count - 1; i >= 0; i--)
8211 elem = native_interpret_expr (etype, ptr+(i*size), size);
8212 if (!elem)
8213 return NULL_TREE;
8214 elements = tree_cons (NULL_TREE, elem, elements);
8216 return build_vector (type, elements);
8220 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8221 the buffer PTR of length LEN as a constant of type TYPE. For
8222 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8223 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8224 return NULL_TREE. */
8226 tree
8227 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8229 switch (TREE_CODE (type))
8231 case INTEGER_TYPE:
8232 case ENUMERAL_TYPE:
8233 case BOOLEAN_TYPE:
8234 return native_interpret_int (type, ptr, len);
8236 case REAL_TYPE:
8237 return native_interpret_real (type, ptr, len);
8239 case COMPLEX_TYPE:
8240 return native_interpret_complex (type, ptr, len);
8242 case VECTOR_TYPE:
8243 return native_interpret_vector (type, ptr, len);
8245 default:
8246 return NULL_TREE;
8251 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8252 TYPE at compile-time. If we're unable to perform the conversion
8253 return NULL_TREE. */
8255 static tree
8256 fold_view_convert_expr (tree type, tree expr)
8258 /* We support up to 512-bit values (for V8DFmode). */
8259 unsigned char buffer[64];
8260 int len;
8262 /* Check that the host and target are sane. */
8263 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8264 return NULL_TREE;
8266 len = native_encode_expr (expr, buffer, sizeof (buffer));
8267 if (len == 0)
8268 return NULL_TREE;
8270 return native_interpret_expr (type, buffer, len);
8273 /* Build an expression for the address of T. Folds away INDIRECT_REF
8274 to avoid confusing the gimplify process. */
8276 tree
8277 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
/* Returns a tree of type PTRTYPE whose value is the address of T,
   located at LOC.  Simple folds are applied instead of always building
   a fresh ADDR_EXPR.  */
8279 /* The size of the object is not relevant when talking about its address. */
8280 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8281 t = TREE_OPERAND (t, 0);
8283 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
8284 if (TREE_CODE (t) == INDIRECT_REF
8285 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p itself; only a NOP_EXPR cast is added if the
     pointer's type differs from the requested PTRTYPE. */
8287 t = TREE_OPERAND (t, 0);
8289 if (TREE_TYPE (t) != ptrtype)
8291 t = build1 (NOP_EXPR, ptrtype, t);
8292 SET_EXPR_LOCATION (t, loc);
8295 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
/* &VIEW_CONVERT_EXPR<X> is the address of X, converted to PTRTYPE. */
8297 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8299 if (TREE_TYPE (t) != ptrtype)
8300 t = fold_convert_loc (loc, ptrtype, t);
8302 else
/* General case: build the ADDR_EXPR directly. */
8304 t = build1 (ADDR_EXPR, ptrtype, t);
8305 SET_EXPR_LOCATION (t, loc);
8308 return t;
8311 /* Build an expression for the address of T. */
8313 tree
8314 build_fold_addr_expr_loc (location_t loc, tree t)
/* Convenience wrapper: derive the natural pointer type for T and
   delegate to build_fold_addr_expr_with_type_loc.  */
8316 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8318 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8321 /* Fold a unary expression of code CODE and type TYPE with operand
8322 OP0. Return the folded expression if folding is successful.
8323 Otherwise, return NULL_TREE. */
8325 tree
8326 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8328 tree tem;
8329 tree arg0;
8330 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only genuine single-operand expression codes are handled here.  */
8332 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8333 && TREE_CODE_LENGTH (code) == 1);
/* ARG0 is OP0 with conversions stripped for analysis only; OP0 keeps
   the original (possibly converted) operand for rebuilding trees.  */
8335 arg0 = op0;
8336 if (arg0)
8338 if (CONVERT_EXPR_CODE_P (code)
8339 || code == FLOAT_EXPR || code == ABS_EXPR)
8341 /* Don't use STRIP_NOPS, because signedness of argument type
8342 matters. */
8343 STRIP_SIGN_NOPS (arg0);
8345 else
8347 /* Strip any conversions that don't change the mode. This
8348 is safe for every expression, except for a comparison
8349 expression because its signedness is derived from its
8350 operands.
8352 Note that this is done as an internal manipulation within
8353 the constant folder, in order to find the simplest
8354 representation of the arguments so that their form can be
8355 studied. In any cases, the appropriate type conversions
8356 should be put back in the tree that will get out of the
8357 constant folder. */
8358 STRIP_NOPS (arg0);
/* Generic distribution rules that apply to every tcc_unary code:
   push the unary op into the arms of a COMPOUND_EXPR or COND_EXPR,
   and absorb it into a comparison where that is type-correct.  */
8362 if (TREE_CODE_CLASS (code) == tcc_unary)
/* op (a, b) -> (a, op b): the first operand is evaluated only for
     its side effects.  */
8364 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8365 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8366 fold_build1_loc (loc, code, type,
8367 fold_convert_loc (loc, TREE_TYPE (op0),
8368 TREE_OPERAND (arg0, 1))));
8369 else if (TREE_CODE (arg0) == COND_EXPR)
/* op (c ? x : y) -> c ? op x : op y, skipping VOID arms (e.g. a
     throw-expression arm has no value to operate on).  */
8371 tree arg01 = TREE_OPERAND (arg0, 1);
8372 tree arg02 = TREE_OPERAND (arg0, 2);
8373 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8374 arg01 = fold_build1_loc (loc, code, type,
8375 fold_convert_loc (loc,
8376 TREE_TYPE (op0), arg01));
8377 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8378 arg02 = fold_build1_loc (loc, code, type,
8379 fold_convert_loc (loc,
8380 TREE_TYPE (op0), arg02));
8381 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8382 arg01, arg02);
8384 /* If this was a conversion, and all we did was to move into
8385 inside the COND_EXPR, bring it back out. But leave it if
8386 it is a conversion from integer to integer and the
8387 result precision is no wider than a word since such a
8388 conversion is cheap and may be optimized away by combine,
8389 while it couldn't if it were outside the COND_EXPR. Then return
8390 so we don't get into an infinite recursion loop taking the
8391 conversion out and then back in. */
8393 if ((CONVERT_EXPR_CODE_P (code)
8394 || code == NON_LVALUE_EXPR)
8395 && TREE_CODE (tem) == COND_EXPR
8396 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8397 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8398 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8399 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8400 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8401 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8402 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8403 && (INTEGRAL_TYPE_P
8404 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8405 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8406 || flag_syntax_only))
8408 tem = build1 (code, type,
8409 build3 (COND_EXPR,
8410 TREE_TYPE (TREE_OPERAND
8411 (TREE_OPERAND (tem, 1), 0)),
8412 TREE_OPERAND (tem, 0),
8413 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8414 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8415 SET_EXPR_LOCATION (tem, loc);
8417 return tem;
8419 else if (COMPARISON_CLASS_P (arg0))
/* A comparison already yields a boolean-ish value; retype it
     directly for BOOLEAN_TYPE, otherwise (for non-integer result
     types) select between op(1) and op(0).  */
8421 if (TREE_CODE (type) == BOOLEAN_TYPE)
8423 arg0 = copy_node (arg0);
8424 TREE_TYPE (arg0) = type;
8425 return arg0;
8427 else if (TREE_CODE (type) != INTEGER_TYPE)
8428 return fold_build3_loc (loc, COND_EXPR, type, arg0,
8429 fold_build1_loc (loc, code, type,
8430 integer_one_node),
8431 fold_build1_loc (loc, code, type,
8432 integer_zero_node));
/* Code-specific foldings.  Every case returns its folded tree or
   NULL_TREE; there is no fall-through between cases.  */
8436 switch (code)
8438 case PAREN_EXPR:
8439 /* Re-association barriers around constants and other re-association
8440 barriers can be removed. */
8441 if (CONSTANT_CLASS_P (op0)
8442 || TREE_CODE (op0) == PAREN_EXPR)
8443 return fold_convert_loc (loc, type, op0);
8444 return NULL_TREE;
8446 CASE_CONVERT:
8447 case FLOAT_EXPR:
8448 case FIX_TRUNC_EXPR:
/* Identity conversion: nothing to do.  */
8449 if (TREE_TYPE (op0) == type)
8450 return op0;
8452 /* If we have (type) (a CMP b) and type is an integral type, return
8453 new expression involving the new type. */
8454 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8455 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8456 TREE_OPERAND (op0, 1));
8458 /* Handle cases of two conversions in a row. */
8459 if (CONVERT_EXPR_P (op0))
/* Classify the three types involved (innermost -> intermediate ->
     final) so the elision conditions below can be stated once.  */
8461 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8462 tree inter_type = TREE_TYPE (op0);
8463 int inside_int = INTEGRAL_TYPE_P (inside_type);
8464 int inside_ptr = POINTER_TYPE_P (inside_type);
8465 int inside_float = FLOAT_TYPE_P (inside_type);
8466 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8467 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8468 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8469 int inter_int = INTEGRAL_TYPE_P (inter_type);
8470 int inter_ptr = POINTER_TYPE_P (inter_type);
8471 int inter_float = FLOAT_TYPE_P (inter_type);
8472 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8473 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8474 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8475 int final_int = INTEGRAL_TYPE_P (type);
8476 int final_ptr = POINTER_TYPE_P (type);
8477 int final_float = FLOAT_TYPE_P (type);
8478 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8479 unsigned int final_prec = TYPE_PRECISION (type);
8480 int final_unsignedp = TYPE_UNSIGNED (type);
8482 /* In addition to the cases of two conversions in a row
8483 handled below, if we are converting something to its own
8484 type via an object of identical or wider precision, neither
8485 conversion is needed. */
8486 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8487 && (((inter_int || inter_ptr) && final_int)
8488 || (inter_float && final_float))
8489 && inter_prec >= final_prec)
8490 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8492 /* Likewise, if the intermediate and initial types are either both
8493 float or both integer, we don't need the middle conversion if the
8494 former is wider than the latter and doesn't change the signedness
8495 (for integers). Avoid this if the final type is a pointer since
8496 then we sometimes need the middle conversion. Likewise if the
8497 final type has a precision not equal to the size of its mode. */
8498 if (((inter_int && inside_int)
8499 || (inter_float && inside_float)
8500 || (inter_vec && inside_vec))
8501 && inter_prec >= inside_prec
8502 && (inter_float || inter_vec
8503 || inter_unsignedp == inside_unsignedp)
8504 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8505 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8506 && ! final_ptr
8507 && (! final_vec || inter_prec == inside_prec))
8508 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8510 /* If we have a sign-extension of a zero-extended value, we can
8511 replace that by a single zero-extension. */
8512 if (inside_int && inter_int && final_int
8513 && inside_prec < inter_prec && inter_prec < final_prec
8514 && inside_unsignedp && !inter_unsignedp)
8515 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8517 /* Two conversions in a row are not needed unless:
8518 - some conversion is floating-point (overstrict for now), or
8519 - some conversion is a vector (overstrict for now), or
8520 - the intermediate type is narrower than both initial and
8521 final, or
8522 - the intermediate type and innermost type differ in signedness,
8523 and the outermost type is wider than the intermediate, or
8524 - the initial type is a pointer type and the precisions of the
8525 intermediate and final types differ, or
8526 - the final type is a pointer type and the precisions of the
8527 initial and intermediate types differ. */
8528 if (! inside_float && ! inter_float && ! final_float
8529 && ! inside_vec && ! inter_vec && ! final_vec
8530 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8531 && ! (inside_int && inter_int
8532 && inter_unsignedp != inside_unsignedp
8533 && inter_prec < final_prec)
8534 && ((inter_unsignedp && inter_prec > inside_prec)
8535 == (final_unsignedp && final_prec > inter_prec))
8536 && ! (inside_ptr && inter_prec != final_prec)
8537 && ! (final_ptr && inside_prec != inter_prec)
8538 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8539 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8540 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8543 /* Handle (T *)&A.B.C for A being of type T and B and C
8544 living at offset zero. This occurs frequently in
8545 C++ upcasting and then accessing the base. */
8546 if (TREE_CODE (op0) == ADDR_EXPR
8547 && POINTER_TYPE_P (type)
8548 && handled_component_p (TREE_OPERAND (op0, 0)))
8550 HOST_WIDE_INT bitsize, bitpos;
8551 tree offset;
8552 enum machine_mode mode;
8553 int unsignedp, volatilep;
8554 tree base = TREE_OPERAND (op0, 0);
8555 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8556 &mode, &unsignedp, &volatilep, false);
8557 /* If the reference was to a (constant) zero offset, we can use
8558 the address of the base if it has the same base type
8559 as the result type. */
8560 if (! offset && bitpos == 0
8561 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8562 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8563 return fold_convert_loc (loc, type,
8564 build_fold_addr_expr_loc (loc, base));
8567 if (TREE_CODE (op0) == MODIFY_EXPR
8568 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8569 /* Detect assigning a bitfield. */
8570 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8571 && DECL_BIT_FIELD
8572 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8574 /* Don't leave an assignment inside a conversion
8575 unless assigning a bitfield. */
8576 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8577 /* First do the assignment, then return converted constant. */
8578 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8579 TREE_NO_WARNING (tem) = 1;
8580 TREE_USED (tem) = 1;
8581 SET_EXPR_LOCATION (tem, loc);
8582 return tem;
8585 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8586 constants (if x has signed type, the sign bit cannot be set
8587 in c). This folds extension into the BIT_AND_EXPR.
8588 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8589 very likely don't have maximal range for their precision and this
8590 transformation effectively doesn't preserve non-maximal ranges. */
8591 if (TREE_CODE (type) == INTEGER_TYPE
8592 && TREE_CODE (op0) == BIT_AND_EXPR
8593 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8595 tree and_expr = op0;
8596 tree and0 = TREE_OPERAND (and_expr, 0);
8597 tree and1 = TREE_OPERAND (and_expr, 1);
8598 int change = 0;
/* Safe unconditionally for unsigned sources or narrowing/same-width
       conversions; for a widening of a signed source, only when the
       mask's sign bit (and above) are clear.  */
8600 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8601 || (TYPE_PRECISION (type)
8602 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8603 change = 1;
8604 else if (TYPE_PRECISION (TREE_TYPE (and1))
8605 <= HOST_BITS_PER_WIDE_INT
8606 && host_integerp (and1, 1))
8608 unsigned HOST_WIDE_INT cst;
8610 cst = tree_low_cst (and1, 1);
8611 cst &= (HOST_WIDE_INT) -1
8612 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8613 change = (cst == 0);
8614 #ifdef LOAD_EXTEND_OP
/* On targets that zero-extend loads, prefer the unsigned form so
       the extension can be merged with the load.  */
8615 if (change
8616 && !flag_syntax_only
8617 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8618 == ZERO_EXTEND))
8620 tree uns = unsigned_type_for (TREE_TYPE (and0));
8621 and0 = fold_convert_loc (loc, uns, and0);
8622 and1 = fold_convert_loc (loc, uns, and1);
8624 #endif
8626 if (change)
8628 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8629 TREE_INT_CST_HIGH (and1), 0,
8630 TREE_OVERFLOW (and1));
8631 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8632 fold_convert_loc (loc, type, and0), tem);
8636 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8637 when one of the new casts will fold away. Conservatively we assume
8638 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8639 if (POINTER_TYPE_P (type)
8640 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8641 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8642 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8643 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8645 tree arg00 = TREE_OPERAND (arg0, 0);
8646 tree arg01 = TREE_OPERAND (arg0, 1);
8648 return fold_build2_loc (loc,
8649 TREE_CODE (arg0), type,
8650 fold_convert_loc (loc, type, arg00),
8651 fold_convert_loc (loc, sizetype, arg01));
8654 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8655 of the same precision, and X is an integer type not narrower than
8656 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8657 if (INTEGRAL_TYPE_P (type)
8658 && TREE_CODE (op0) == BIT_NOT_EXPR
8659 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8660 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8661 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8663 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8664 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8665 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8666 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8667 fold_convert_loc (loc, type, tem));
8670 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8671 type of X and Y (integer types only). */
8672 if (INTEGRAL_TYPE_P (type)
8673 && TREE_CODE (op0) == MULT_EXPR
8674 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8675 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8677 /* Be careful not to introduce new overflows. */
8678 tree mult_type;
8679 if (TYPE_OVERFLOW_WRAPS (type))
8680 mult_type = type;
8681 else
8682 mult_type = unsigned_type_for (type);
8684 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8686 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8687 fold_convert_loc (loc, mult_type,
8688 TREE_OPERAND (op0, 0)),
8689 fold_convert_loc (loc, mult_type,
8690 TREE_OPERAND (op0, 1)));
8691 return fold_convert_loc (loc, type, tem);
/* Last resort for conversions: constant-fold if OP0 is a constant.  */
8695 tem = fold_convert_const (code, type, op0);
8696 return tem ? tem : NULL_TREE;
8698 case ADDR_SPACE_CONVERT_EXPR:
8699 if (integer_zerop (arg0))
8700 return fold_convert_const (code, type, arg0);
8701 return NULL_TREE;
8703 case FIXED_CONVERT_EXPR:
8704 tem = fold_convert_const (code, type, arg0);
8705 return tem ? tem : NULL_TREE;
8707 case VIEW_CONVERT_EXPR:
8708 if (TREE_TYPE (op0) == type)
8709 return op0;
/* Nested view-converts collapse to a single one from the
     innermost operand.  */
8710 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8711 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8712 type, TREE_OPERAND (op0, 0));
8714 /* For integral conversions with the same precision or pointer
8715 conversions use a NOP_EXPR instead. */
8716 if ((INTEGRAL_TYPE_P (type)
8717 || POINTER_TYPE_P (type))
8718 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8719 || POINTER_TYPE_P (TREE_TYPE (op0)))
8720 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8721 return fold_convert_loc (loc, type, op0);
8723 /* Strip inner integral conversions that do not change the precision. */
8724 if (CONVERT_EXPR_P (op0)
8725 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8726 || POINTER_TYPE_P (TREE_TYPE (op0)))
8727 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8728 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8729 && (TYPE_PRECISION (TREE_TYPE (op0))
8730 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8731 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8732 type, TREE_OPERAND (op0, 0));
/* Otherwise try the byte-level reinterpretation of a constant.  */
8734 return fold_view_convert_expr (type, op0);
8736 case NEGATE_EXPR:
8737 tem = fold_negate_expr (loc, arg0);
8738 if (tem)
8739 return fold_convert_loc (loc, type, tem);
8740 return NULL_TREE;
8742 case ABS_EXPR:
8743 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8744 return fold_abs_const (arg0, type);
/* abs(-x) == abs(x).  */
8745 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8746 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8747 /* Convert fabs((double)float) into (double)fabsf(float). */
8748 else if (TREE_CODE (arg0) == NOP_EXPR
8749 && TREE_CODE (type) == REAL_TYPE)
8751 tree targ0 = strip_float_extensions (arg0);
8752 if (targ0 != arg0)
8753 return fold_convert_loc (loc, type,
8754 fold_build1_loc (loc, ABS_EXPR,
8755 TREE_TYPE (targ0),
8756 targ0));
8758 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8759 else if (TREE_CODE (arg0) == ABS_EXPR)
8760 return arg0;
8761 else if (tree_expr_nonnegative_p (arg0))
8762 return arg0;
8764 /* Strip sign ops from argument. */
8765 if (TREE_CODE (type) == REAL_TYPE)
8767 tem = fold_strip_sign_ops (arg0);
8768 if (tem)
8769 return fold_build1_loc (loc, ABS_EXPR, type,
8770 fold_convert_loc (loc, type, tem));
8772 return NULL_TREE;
8774 case CONJ_EXPR:
/* Conjugation of a non-complex value is the value itself.  */
8775 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8776 return fold_convert_loc (loc, type, arg0);
8777 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8779 tree itype = TREE_TYPE (type);
8780 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8781 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8782 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8783 negate_expr (ipart));
8785 if (TREE_CODE (arg0) == COMPLEX_CST)
8787 tree itype = TREE_TYPE (type);
8788 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8789 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8790 return build_complex (type, rpart, negate_expr (ipart));
/* conj(conj(x)) == x.  */
8792 if (TREE_CODE (arg0) == CONJ_EXPR)
8793 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8794 return NULL_TREE;
8796 case BIT_NOT_EXPR:
8797 if (TREE_CODE (arg0) == INTEGER_CST)
8798 return fold_not_const (arg0, type);
/* ~~x == x.  */
8799 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8800 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8801 /* Convert ~ (-A) to A - 1. */
8802 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8803 return fold_build2_loc (loc, MINUS_EXPR, type,
8804 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8805 build_int_cst (type, 1));
8806 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8807 else if (INTEGRAL_TYPE_P (type)
8808 && ((TREE_CODE (arg0) == MINUS_EXPR
8809 && integer_onep (TREE_OPERAND (arg0, 1)))
8810 || (TREE_CODE (arg0) == PLUS_EXPR
8811 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8812 return fold_build1_loc (loc, NEGATE_EXPR, type,
8813 fold_convert_loc (loc, type,
8814 TREE_OPERAND (arg0, 0)));
8815 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8816 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8817 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8818 fold_convert_loc (loc, type,
8819 TREE_OPERAND (arg0, 0)))))
8820 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8821 fold_convert_loc (loc, type,
8822 TREE_OPERAND (arg0, 1)));
8823 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8824 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8825 fold_convert_loc (loc, type,
8826 TREE_OPERAND (arg0, 1)))))
8827 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8828 fold_convert_loc (loc, type,
8829 TREE_OPERAND (arg0, 0)), tem);
8830 /* Perform BIT_NOT_EXPR on each element individually. */
8831 else if (TREE_CODE (arg0) == VECTOR_CST)
8833 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8834 int count = TYPE_VECTOR_SUBPARTS (type), i;
8836 for (i = 0; i < count; i++)
8838 if (elements)
8840 elem = TREE_VALUE (elements);
8841 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8842 if (elem == NULL_TREE)
8843 break;
8844 elements = TREE_CHAIN (elements);
8846 else
/* Missing trailing elements are implicitly zero; ~0 == -1.  */
8847 elem = build_int_cst (TREE_TYPE (type), -1);
8848 list = tree_cons (NULL_TREE, elem, list);
/* Only build the result if every element folded.  */
8850 if (i == count)
8851 return build_vector (type, nreverse (list));
8854 return NULL_TREE;
8856 case TRUTH_NOT_EXPR:
8857 /* The argument to invert_truthvalue must have Boolean type. */
8858 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8859 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8861 /* Note that the operand of this must be an int
8862 and its values must be 0 or 1.
8863 ("true" is a fixed value perhaps depending on the language,
8864 but we don't handle values other than 1 correctly yet.) */
8865 tem = fold_truth_not_expr (loc, arg0);
8866 if (!tem)
8867 return NULL_TREE;
8868 return fold_convert_loc (loc, type, tem);
8870 case REALPART_EXPR:
/* The real part of a non-complex value is the value itself.  */
8871 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8872 return fold_convert_loc (loc, type, arg0);
8873 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8874 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8875 TREE_OPERAND (arg0, 1));
8876 if (TREE_CODE (arg0) == COMPLEX_CST)
8877 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
/* real(x +- y) = real(x) +- real(y).  */
8878 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8880 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8881 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8882 fold_build1_loc (loc, REALPART_EXPR, itype,
8883 TREE_OPERAND (arg0, 0)),
8884 fold_build1_loc (loc, REALPART_EXPR, itype,
8885 TREE_OPERAND (arg0, 1)));
8886 return fold_convert_loc (loc, type, tem);
/* Conjugation leaves the real part untouched.  */
8888 if (TREE_CODE (arg0) == CONJ_EXPR)
8890 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8891 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8892 TREE_OPERAND (arg0, 0));
8893 return fold_convert_loc (loc, type, tem);
/* real(cexpi(x)) folds to cos(x).  */
8895 if (TREE_CODE (arg0) == CALL_EXPR)
8897 tree fn = get_callee_fndecl (arg0);
8898 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8899 switch (DECL_FUNCTION_CODE (fn))
8901 CASE_FLT_FN (BUILT_IN_CEXPI):
8902 fn = mathfn_built_in (type, BUILT_IN_COS);
8903 if (fn)
8904 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8905 break;
8907 default:
8908 break;
8911 return NULL_TREE;
8913 case IMAGPART_EXPR:
/* A non-complex value has a zero imaginary part.  */
8914 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8915 return fold_convert_loc (loc, type, integer_zero_node);
8916 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8917 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8918 TREE_OPERAND (arg0, 0));
8919 if (TREE_CODE (arg0) == COMPLEX_CST)
8920 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
/* imag(x +- y) = imag(x) +- imag(y).  */
8921 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8923 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8924 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8925 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8926 TREE_OPERAND (arg0, 0)),
8927 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8928 TREE_OPERAND (arg0, 1)));
8929 return fold_convert_loc (loc, type, tem);
/* Conjugation negates the imaginary part.  */
8931 if (TREE_CODE (arg0) == CONJ_EXPR)
8933 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8934 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8935 return fold_convert_loc (loc, type, negate_expr (tem));
/* imag(cexpi(x)) folds to sin(x).  */
8937 if (TREE_CODE (arg0) == CALL_EXPR)
8939 tree fn = get_callee_fndecl (arg0);
8940 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8941 switch (DECL_FUNCTION_CODE (fn))
8943 CASE_FLT_FN (BUILT_IN_CEXPI):
8944 fn = mathfn_built_in (type, BUILT_IN_SIN);
8945 if (fn)
8946 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8947 break;
8949 default:
8950 break;
8953 return NULL_TREE;
8955 case INDIRECT_REF:
8956 /* Fold *&X to X if X is an lvalue. */
8957 if (TREE_CODE (op0) == ADDR_EXPR)
8959 tree op00 = TREE_OPERAND (op0, 0);
8960 if ((TREE_CODE (op00) == VAR_DECL
8961 || TREE_CODE (op00) == PARM_DECL
8962 || TREE_CODE (op00) == RESULT_DECL)
8963 && !TREE_READONLY (op00))
8964 return op00;
8966 return NULL_TREE;
8968 default:
8969 return NULL_TREE;
8970 } /* switch (code) */
8974 /* If the operation was a conversion do _not_ mark a resulting constant
8975 with TREE_OVERFLOW if the original constant was not. These conversions
8976 have implementation defined behavior and retaining the TREE_OVERFLOW
8977 flag here would confuse later passes such as VRP. */
8978 tree
8979 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8980 tree type, tree op0)
/* Wrapper around fold_unary_loc that copies OP0's overflow flag onto
   the folded result when a conversion of an integer constant produced
   another integer constant.  */
8982 tree res = fold_unary_loc (loc, code, type, op0);
8983 if (res
8984 && TREE_CODE (res) == INTEGER_CST
8985 && TREE_CODE (op0) == INTEGER_CST
8986 && CONVERT_EXPR_CODE_P (code))
8987 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8989 return res;
8992 /* Fold a binary expression of code CODE and type TYPE with operands
8993 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8994 Return the folded expression if folding is successful. Otherwise,
8995 return NULL_TREE. */
8997 static tree
8998 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
9000 enum tree_code compl_code;
/* COMPL_CODE is the dual of CODE; only MIN_EXPR/MAX_EXPR may be
   passed in.  */
9002 if (code == MIN_EXPR)
9003 compl_code = MAX_EXPR;
9004 else if (code == MAX_EXPR)
9005 compl_code = MIN_EXPR;
9006 else
9007 gcc_unreachable ();
/* The four absorption identities below keep the surviving operand via
   omit_one_operand_loc so side effects of the dropped operand are
   preserved.  reorder_operands_p guards the cases where evaluation
   order of the two operands would otherwise change.  */
9009 /* MIN (MAX (a, b), b) == b. */
9010 if (TREE_CODE (op0) == compl_code
9011 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
9012 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
9014 /* MIN (MAX (b, a), b) == b. */
9015 if (TREE_CODE (op0) == compl_code
9016 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
9017 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
9018 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
9020 /* MIN (a, MAX (a, b)) == a. */
9021 if (TREE_CODE (op1) == compl_code
9022 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
9023 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
9024 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
9026 /* MIN (a, MAX (b, a)) == a. */
9027 if (TREE_CODE (op1) == compl_code
9028 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
9029 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
9030 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
9032 return NULL_TREE;
9035 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9036 by changing CODE to reduce the magnitude of constants involved in
9037 ARG0 of the comparison.
9038 Returns a canonicalized comparison tree if a simplification was
9039 possible, otherwise returns NULL_TREE.
9040 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9041 valid if signed overflow is undefined. */
9043 static tree
9044 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9045 tree arg0, tree arg1,
9046 bool *strict_overflow_p)
9048 enum tree_code code0 = TREE_CODE (arg0);
9049 tree t, cst0 = NULL_TREE;
9050 int sgn0;
9051 bool swap = false;
9053 /* Match A +- CST code arg1 and CST code arg1. We can change the
9054 first form only if overflow is undefined. */
9055 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9056 /* In principle pointers also have undefined overflow behavior,
9057 but that causes problems elsewhere. */
9058 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9059 && (code0 == MINUS_EXPR
9060 || code0 == PLUS_EXPR)
9061 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9062 || code0 == INTEGER_CST))
9063 return NULL_TREE;
9065 /* Identify the constant in arg0 and its sign. */
9066 if (code0 == INTEGER_CST)
9067 cst0 = arg0;
9068 else
9069 cst0 = TREE_OPERAND (arg0, 1);
9070 sgn0 = tree_int_cst_sgn (cst0);
9072 /* Overflowed constants and zero will cause problems. */
9073 if (integer_zerop (cst0)
9074 || TREE_OVERFLOW (cst0))
9075 return NULL_TREE;
9077 /* See if we can reduce the magnitude of the constant in
9078 arg0 by changing the comparison code. */
9079 if (code0 == INTEGER_CST)
/* Pure-constant left operand: tighten the comparison by one and
     plan to swap the operands into canonical (var CMP cst) order.  */
9081 /* CST <= arg1 -> CST-1 < arg1. */
9082 if (code == LE_EXPR && sgn0 == 1)
9083 code = LT_EXPR;
9084 /* -CST < arg1 -> -CST-1 <= arg1. */
9085 else if (code == LT_EXPR && sgn0 == -1)
9086 code = LE_EXPR;
9087 /* CST > arg1 -> CST-1 >= arg1. */
9088 else if (code == GT_EXPR && sgn0 == 1)
9089 code = GE_EXPR;
9090 /* -CST >= arg1 -> -CST-1 > arg1. */
9091 else if (code == GE_EXPR && sgn0 == -1)
9092 code = GT_EXPR;
9093 else
9094 return NULL_TREE;
9095 /* arg1 code' CST' might be more canonical. */
9096 swap = true;
9098 else
/* A +- CST form: adjusting the embedded constant relies on signed
     overflow being undefined, so flag it for the caller's warning.  */
9100 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9101 if (code == LT_EXPR
9102 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9103 code = LE_EXPR;
9104 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9105 else if (code == GT_EXPR
9106 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9107 code = GE_EXPR;
9108 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9109 else if (code == LE_EXPR
9110 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9111 code = LT_EXPR;
9112 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9113 else if (code == GE_EXPR
9114 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9115 code = GT_EXPR;
9116 else
9117 return NULL_TREE;
9118 *strict_overflow_p = true;
9121 /* Now build the constant reduced in magnitude. But not if that
9122 would produce one outside of its types range. */
9123 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9124 && ((sgn0 == 1
9125 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9126 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9127 || (sgn0 == -1
9128 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9129 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9130 /* We cannot swap the comparison here as that would cause us to
9131 endlessly recurse. */
9132 return NULL_TREE;
/* Reduce |CST0| by one: add 1 to a negative constant, subtract 1
   from a positive one.  */
9134 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9135 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
9136 if (code0 != INTEGER_CST)
9137 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9139 /* If swapping might yield to a more canonical form, do so. */
9140 if (swap)
9141 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
9142 else
9143 return fold_build2_loc (loc, code, type, t, arg1);
9146 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9147 overflow further. Try to decrease the magnitude of constants involved
9148 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9149 and put sole constants at the second argument position.
9150 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9152 static tree
9153 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9154 tree arg0, tree arg1)
9156 tree t;
9157 bool strict_overflow_p;
9158 const char * const warnmsg = G_("assuming signed overflow does not occur "
9159 "when reducing constant in comparison");
9161 /* Try canonicalization by simplifying arg0. */
9162 strict_overflow_p = false;
9163 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9164 &strict_overflow_p);
9165 if (t)
/* Warn only when the simplification depended on undefined signed
     overflow.  */
9167 if (strict_overflow_p)
9168 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9169 return t;
9172 /* Try canonicalization by simplifying arg1 using the swapped
9173 comparison. */
9174 code = swap_tree_comparison (code);
9175 strict_overflow_p = false;
9176 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9177 &strict_overflow_p);
9178 if (t && strict_overflow_p)
9179 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9180 return t;
9183 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9184 space. This is used to avoid issuing overflow warnings for
9185 expressions like &p->x which can not wrap. */
9187 static bool
9188 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
/* Conservative predicate: every early "return true" below means
   "cannot prove it does not wrap".  */
9190 unsigned HOST_WIDE_INT offset_low, total_low;
9191 HOST_WIDE_INT size, offset_high, total_high;
9193 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9194 return true;
9196 if (bitpos < 0)
9197 return true;
/* Extract OFFSET as a double-word integer; anything non-constant or
   already overflowed is assumed able to wrap.  */
9199 if (offset == NULL_TREE)
9201 offset_low = 0;
9202 offset_high = 0;
9204 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9205 return true;
9206 else
9208 offset_low = TREE_INT_CST_LOW (offset);
9209 offset_high = TREE_INT_CST_HIGH (offset);
/* TOTAL = OFFSET + BITPOS in bytes; an overflowing add (unsigned,
   per the trailing 'true' argument) means possible wrap.  */
9212 if (add_double_with_sign (offset_low, offset_high,
9213 bitpos / BITS_PER_UNIT, 0,
9214 &total_low, &total_high,
9215 true))
9216 return true;
9218 if (total_high != 0)
9219 return true;
9221 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9222 if (size <= 0)
9223 return true;
9225 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9226 array. */
9227 if (TREE_CODE (base) == ADDR_EXPR)
9229 HOST_WIDE_INT base_size;
9231 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9232 if (base_size > 0 && size < base_size)
9233 size = base_size;
/* Wraps only if the total byte offset exceeds the object size.  */
9236 return total_low > (unsigned HOST_WIDE_INT) size;
9239 /* Subroutine of fold_binary. This routine performs all of the
9240 transformations that are common to the equality/inequality
9241 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9242 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9243 fold_binary should call fold_binary. Fold a comparison with
9244 tree code CODE and type TYPE with operands OP0 and OP1. Return
9245 the folded comparison or NULL_TREE. */
static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  /* ARG0/ARG1 are OP0/OP1 with sign-preserving conversions stripped;
     OP0/OP1 themselves are kept for rebuilding the expression.  */
  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* If both operands are constants the comparison folds directly.  */
  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.
     Valid only when signed overflow is undefined in ARG1's type.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
	}

      /* No overflow: move the combined constant to the RHS, warning
	 that the transform assumes undefined signed overflow.  */
      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning (("assuming signed overflow does not occur "
				  "when changing X +- C1 cmp C2 to "
				  "X cmp C1 +- C2"),
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      /* Likewise for the second operand.  */
      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      /* The comparison reduces to comparing the constant
		 bit positions.  */
	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      tree signed_size_type_node;
	      signed_size_type_node = signed_type_for (size_type_node);

	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (signed_size_type_node, 0);
	      else
		offset0 = fold_convert_loc (loc, signed_size_type_node,
					    offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (signed_size_type_node, 0);
	      else
		offset1 = fold_convert_loc (loc, signed_size_type_node,
					    offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  /* Distinct objects cannot compare equal.  */
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1), TREE_TYPE (arg1),
						   variable2, cst));
	}

      /* Otherwise try moving the combined constant to the LHS.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* A zero multiplier would have been folded away already.  */
      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type, arg1)));
    }

  return NULL_TREE;
}
9904 /* Subroutine of fold_binary. Optimize complex multiplications of the
9905 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9906 argument EXPR represents the expression "z" of type TYPE. */
9908 static tree
9909 fold_mult_zconjz (location_t loc, tree type, tree expr)
9911 tree itype = TREE_TYPE (type);
9912 tree rpart, ipart, tem;
9914 if (TREE_CODE (expr) == COMPLEX_EXPR)
9916 rpart = TREE_OPERAND (expr, 0);
9917 ipart = TREE_OPERAND (expr, 1);
9919 else if (TREE_CODE (expr) == COMPLEX_CST)
9921 rpart = TREE_REALPART (expr);
9922 ipart = TREE_IMAGPART (expr);
9924 else
9926 expr = save_expr (expr);
9927 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9928 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9931 rpart = save_expr (rpart);
9932 ipart = save_expr (ipart);
9933 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9934 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9935 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9936 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9937 fold_convert_loc (loc, itype, integer_zero_node));
9941 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9942 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9943 guarantees that P and N have the same least significant log2(M) bits.
9944 N is not otherwise constrained. In particular, N is not normalized to
9945 0 <= N < M as is common. In general, the precise value of P is unknown.
9946 M is chosen as large as possible such that constant N can be determined.
9948 Returns M and sets *RESIDUE to N.
9950 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9951 account. This is not always possible due to PR 35705.
static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;

	  /* Peel off component/array references to find the underlying
	     decl and accumulate the constant byte offset.  */
	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	  if (offset)
	    {
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      else
		/* We don't handle more complicated offset expressions.  */
		return 1;
	    }
	}

      /* The decl's declared alignment bounds the modulus.  Function
	 decls are excluded unless ALLOW_FUNC_ALIGN (see PR 35705).  */
      if (DECL_P (expr)
	  && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
	return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      /* Recurse on the pointer operand; the addend below only adjusts
	 the residue or tightens the modulus.  */
      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  /* A constant addend shifts the residue but keeps the modulus.  */
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
10039 /* Fold a binary expression of code CODE and type TYPE with operands
10040 OP0 and OP1. LOC is the location of the resulting expression.
10041 Return the folded expression if folding is successful. Otherwise,
10042 return NULL_TREE. */
10044 tree
10045 fold_binary_loc (location_t loc,
10046 enum tree_code code, tree type, tree op0, tree op1)
10048 enum tree_code_class kind = TREE_CODE_CLASS (code);
10049 tree arg0, arg1, tem;
10050 tree t1 = NULL_TREE;
10051 bool strict_overflow_p;
10053 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10054 && TREE_CODE_LENGTH (code) == 2
10055 && op0 != NULL_TREE
10056 && op1 != NULL_TREE);
10058 arg0 = op0;
10059 arg1 = op1;
10061 /* Strip any conversions that don't change the mode. This is
10062 safe for every expression, except for a comparison expression
10063 because its signedness is derived from its operands. So, in
10064 the latter case, only strip conversions that don't change the
10065 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10066 preserved.
10068 Note that this is done as an internal manipulation within the
10069 constant folder, in order to find the simplest representation
10070 of the arguments so that their form can be studied. In any
10071 cases, the appropriate type conversions should be put back in
10072 the tree that will get out of the constant folder. */
10074 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10076 STRIP_SIGN_NOPS (arg0);
10077 STRIP_SIGN_NOPS (arg1);
10079 else
10081 STRIP_NOPS (arg0);
10082 STRIP_NOPS (arg1);
10085 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10086 constant but we can't do arithmetic on them. */
10087 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10088 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10089 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10090 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10091 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10092 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
10094 if (kind == tcc_binary)
10096 /* Make sure type and arg0 have the same saturating flag. */
10097 gcc_assert (TYPE_SATURATING (type)
10098 == TYPE_SATURATING (TREE_TYPE (arg0)));
10099 tem = const_binop (code, arg0, arg1, 0);
10101 else if (kind == tcc_comparison)
10102 tem = fold_relational_const (code, type, arg0, arg1);
10103 else
10104 tem = NULL_TREE;
10106 if (tem != NULL_TREE)
10108 if (TREE_TYPE (tem) != type)
10109 tem = fold_convert_loc (loc, type, tem);
10110 return tem;
10114 /* If this is a commutative operation, and ARG0 is a constant, move it
10115 to ARG1 to reduce the number of tests below. */
10116 if (commutative_tree_code (code)
10117 && tree_swap_operands_p (arg0, arg1, true))
10118 return fold_build2_loc (loc, code, type, op1, op0);
10120 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10122 First check for cases where an arithmetic operation is applied to a
10123 compound, conditional, or comparison operation. Push the arithmetic
10124 operation inside the compound or conditional to see if any folding
10125 can then be done. Convert comparison to conditional for this purpose.
10126 The also optimizes non-constant cases that used to be done in
10127 expand_expr.
10129 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10130 one of the operands is a comparison and the other is a comparison, a
10131 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10132 code below would make the expression more complex. Change it to a
10133 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10134 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10136 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10137 || code == EQ_EXPR || code == NE_EXPR)
10138 && ((truth_value_p (TREE_CODE (arg0))
10139 && (truth_value_p (TREE_CODE (arg1))
10140 || (TREE_CODE (arg1) == BIT_AND_EXPR
10141 && integer_onep (TREE_OPERAND (arg1, 1)))))
10142 || (truth_value_p (TREE_CODE (arg1))
10143 && (truth_value_p (TREE_CODE (arg0))
10144 || (TREE_CODE (arg0) == BIT_AND_EXPR
10145 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10147 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10148 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10149 : TRUTH_XOR_EXPR,
10150 boolean_type_node,
10151 fold_convert_loc (loc, boolean_type_node, arg0),
10152 fold_convert_loc (loc, boolean_type_node, arg1));
10154 if (code == EQ_EXPR)
10155 tem = invert_truthvalue_loc (loc, tem);
10157 return fold_convert_loc (loc, type, tem);
10160 if (TREE_CODE_CLASS (code) == tcc_binary
10161 || TREE_CODE_CLASS (code) == tcc_comparison)
10163 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10165 tem = fold_build2_loc (loc, code, type,
10166 fold_convert_loc (loc, TREE_TYPE (op0),
10167 TREE_OPERAND (arg0, 1)), op1);
10168 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10169 goto fold_binary_exit;
10171 if (TREE_CODE (arg1) == COMPOUND_EXPR
10172 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10174 tem = fold_build2_loc (loc, code, type, op0,
10175 fold_convert_loc (loc, TREE_TYPE (op1),
10176 TREE_OPERAND (arg1, 1)));
10177 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10178 goto fold_binary_exit;
10181 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10183 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10184 arg0, arg1,
10185 /*cond_first_p=*/1);
10186 if (tem != NULL_TREE)
10187 return tem;
10190 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10192 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10193 arg1, arg0,
10194 /*cond_first_p=*/0);
10195 if (tem != NULL_TREE)
10196 return tem;
10200 switch (code)
10202 case POINTER_PLUS_EXPR:
10203 /* 0 +p index -> (type)index */
10204 if (integer_zerop (arg0))
10205 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10207 /* PTR +p 0 -> PTR */
10208 if (integer_zerop (arg1))
10209 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10211 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10212 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10213 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10214 return fold_convert_loc (loc, type,
10215 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10216 fold_convert_loc (loc, sizetype,
10217 arg1),
10218 fold_convert_loc (loc, sizetype,
10219 arg0)));
10221 /* index +p PTR -> PTR +p index */
10222 if (POINTER_TYPE_P (TREE_TYPE (arg1))
10223 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10224 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10225 fold_convert_loc (loc, type, arg1),
10226 fold_convert_loc (loc, sizetype, arg0));
10228 /* (PTR +p B) +p A -> PTR +p (B + A) */
10229 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10231 tree inner;
10232 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10233 tree arg00 = TREE_OPERAND (arg0, 0);
10234 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10235 arg01, fold_convert_loc (loc, sizetype, arg1));
10236 return fold_convert_loc (loc, type,
10237 fold_build2_loc (loc, POINTER_PLUS_EXPR,
10238 TREE_TYPE (arg00),
10239 arg00, inner));
10242 /* PTR_CST +p CST -> CST1 */
10243 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10244 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10245 fold_convert_loc (loc, type, arg1));
10247 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10248 of the array. The loop optimizer sometimes produces this type of
10249 expression. */
10250 if (TREE_CODE (arg0) == ADDR_EXPR)
10252 tem = try_move_mult_to_index (loc, arg0,
10253 fold_convert_loc (loc, sizetype, arg1));
10254 if (tem)
10255 return fold_convert_loc (loc, type, tem);
10258 return NULL_TREE;
10260 case PLUS_EXPR:
10261 /* A + (-B) -> A - B */
10262 if (TREE_CODE (arg1) == NEGATE_EXPR)
10263 return fold_build2_loc (loc, MINUS_EXPR, type,
10264 fold_convert_loc (loc, type, arg0),
10265 fold_convert_loc (loc, type,
10266 TREE_OPERAND (arg1, 0)));
10267 /* (-A) + B -> B - A */
10268 if (TREE_CODE (arg0) == NEGATE_EXPR
10269 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10270 return fold_build2_loc (loc, MINUS_EXPR, type,
10271 fold_convert_loc (loc, type, arg1),
10272 fold_convert_loc (loc, type,
10273 TREE_OPERAND (arg0, 0)));
10275 if (INTEGRAL_TYPE_P (type))
10277 /* Convert ~A + 1 to -A. */
10278 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10279 && integer_onep (arg1))
10280 return fold_build1_loc (loc, NEGATE_EXPR, type,
10281 fold_convert_loc (loc, type,
10282 TREE_OPERAND (arg0, 0)));
10284 /* ~X + X is -1. */
10285 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10286 && !TYPE_OVERFLOW_TRAPS (type))
10288 tree tem = TREE_OPERAND (arg0, 0);
10290 STRIP_NOPS (tem);
10291 if (operand_equal_p (tem, arg1, 0))
10293 t1 = build_int_cst_type (type, -1);
10294 return omit_one_operand_loc (loc, type, t1, arg1);
10298 /* X + ~X is -1. */
10299 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10300 && !TYPE_OVERFLOW_TRAPS (type))
10302 tree tem = TREE_OPERAND (arg1, 0);
10304 STRIP_NOPS (tem);
10305 if (operand_equal_p (arg0, tem, 0))
10307 t1 = build_int_cst_type (type, -1);
10308 return omit_one_operand_loc (loc, type, t1, arg0);
10312 /* X + (X / CST) * -CST is X % CST. */
10313 if (TREE_CODE (arg1) == MULT_EXPR
10314 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10315 && operand_equal_p (arg0,
10316 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10318 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10319 tree cst1 = TREE_OPERAND (arg1, 1);
10320 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10321 cst1, cst0);
10322 if (sum && integer_zerop (sum))
10323 return fold_convert_loc (loc, type,
10324 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10325 TREE_TYPE (arg0), arg0,
10326 cst0));
10330 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10331 same or one. Make sure type is not saturating.
10332 fold_plusminus_mult_expr will re-associate. */
10333 if ((TREE_CODE (arg0) == MULT_EXPR
10334 || TREE_CODE (arg1) == MULT_EXPR)
10335 && !TYPE_SATURATING (type)
10336 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10338 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10339 if (tem)
10340 return tem;
10343 if (! FLOAT_TYPE_P (type))
10345 if (integer_zerop (arg1))
10346 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10348 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10349 with a constant, and the two constants have no bits in common,
10350 we should treat this as a BIT_IOR_EXPR since this may produce more
10351 simplifications. */
10352 if (TREE_CODE (arg0) == BIT_AND_EXPR
10353 && TREE_CODE (arg1) == BIT_AND_EXPR
10354 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10355 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10356 && integer_zerop (const_binop (BIT_AND_EXPR,
10357 TREE_OPERAND (arg0, 1),
10358 TREE_OPERAND (arg1, 1), 0)))
10360 code = BIT_IOR_EXPR;
10361 goto bit_ior;
10364 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10365 (plus (plus (mult) (mult)) (foo)) so that we can
10366 take advantage of the factoring cases below. */
10367 if (((TREE_CODE (arg0) == PLUS_EXPR
10368 || TREE_CODE (arg0) == MINUS_EXPR)
10369 && TREE_CODE (arg1) == MULT_EXPR)
10370 || ((TREE_CODE (arg1) == PLUS_EXPR
10371 || TREE_CODE (arg1) == MINUS_EXPR)
10372 && TREE_CODE (arg0) == MULT_EXPR))
10374 tree parg0, parg1, parg, marg;
10375 enum tree_code pcode;
10377 if (TREE_CODE (arg1) == MULT_EXPR)
10378 parg = arg0, marg = arg1;
10379 else
10380 parg = arg1, marg = arg0;
10381 pcode = TREE_CODE (parg);
10382 parg0 = TREE_OPERAND (parg, 0);
10383 parg1 = TREE_OPERAND (parg, 1);
10384 STRIP_NOPS (parg0);
10385 STRIP_NOPS (parg1);
10387 if (TREE_CODE (parg0) == MULT_EXPR
10388 && TREE_CODE (parg1) != MULT_EXPR)
10389 return fold_build2_loc (loc, pcode, type,
10390 fold_build2_loc (loc, PLUS_EXPR, type,
10391 fold_convert_loc (loc, type,
10392 parg0),
10393 fold_convert_loc (loc, type,
10394 marg)),
10395 fold_convert_loc (loc, type, parg1));
10396 if (TREE_CODE (parg0) != MULT_EXPR
10397 && TREE_CODE (parg1) == MULT_EXPR)
10398 return
10399 fold_build2_loc (loc, PLUS_EXPR, type,
10400 fold_convert_loc (loc, type, parg0),
10401 fold_build2_loc (loc, pcode, type,
10402 fold_convert_loc (loc, type, marg),
10403 fold_convert_loc (loc, type,
10404 parg1)));
10407 else
10409 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10410 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10411 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10413 /* Likewise if the operands are reversed. */
10414 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10415 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10417 /* Convert X + -C into X - C. */
10418 if (TREE_CODE (arg1) == REAL_CST
10419 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10421 tem = fold_negate_const (arg1, type);
10422 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10423 return fold_build2_loc (loc, MINUS_EXPR, type,
10424 fold_convert_loc (loc, type, arg0),
10425 fold_convert_loc (loc, type, tem));
10428 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10429 to __complex__ ( x, y ). This is not the same for SNaNs or
10430 if signed zeros are involved. */
10431 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10432 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10433 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10435 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10436 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10437 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10438 bool arg0rz = false, arg0iz = false;
10439 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10440 || (arg0i && (arg0iz = real_zerop (arg0i))))
10442 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10443 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10444 if (arg0rz && arg1i && real_zerop (arg1i))
10446 tree rp = arg1r ? arg1r
10447 : build1 (REALPART_EXPR, rtype, arg1);
10448 tree ip = arg0i ? arg0i
10449 : build1 (IMAGPART_EXPR, rtype, arg0);
10450 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10452 else if (arg0iz && arg1r && real_zerop (arg1r))
10454 tree rp = arg0r ? arg0r
10455 : build1 (REALPART_EXPR, rtype, arg0);
10456 tree ip = arg1i ? arg1i
10457 : build1 (IMAGPART_EXPR, rtype, arg1);
10458 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10463 if (flag_unsafe_math_optimizations
10464 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10465 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10466 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10467 return tem;
10469 /* Convert x+x into x*2.0. */
10470 if (operand_equal_p (arg0, arg1, 0)
10471 && SCALAR_FLOAT_TYPE_P (type))
10472 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10473 build_real (type, dconst2));
10475 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10476 We associate floats only if the user has specified
10477 -fassociative-math. */
10478 if (flag_associative_math
10479 && TREE_CODE (arg1) == PLUS_EXPR
10480 && TREE_CODE (arg0) != MULT_EXPR)
10482 tree tree10 = TREE_OPERAND (arg1, 0);
10483 tree tree11 = TREE_OPERAND (arg1, 1);
10484 if (TREE_CODE (tree11) == MULT_EXPR
10485 && TREE_CODE (tree10) == MULT_EXPR)
10487 tree tree0;
10488 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10489 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10492 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10493 We associate floats only if the user has specified
10494 -fassociative-math. */
10495 if (flag_associative_math
10496 && TREE_CODE (arg0) == PLUS_EXPR
10497 && TREE_CODE (arg1) != MULT_EXPR)
10499 tree tree00 = TREE_OPERAND (arg0, 0);
10500 tree tree01 = TREE_OPERAND (arg0, 1);
10501 if (TREE_CODE (tree01) == MULT_EXPR
10502 && TREE_CODE (tree00) == MULT_EXPR)
10504 tree tree0;
10505 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10506 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10511 bit_rotate:
10512 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10513 is a rotate of A by C1 bits. */
10514 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10515 is a rotate of A by B bits. */
10517 enum tree_code code0, code1;
10518 tree rtype;
10519 code0 = TREE_CODE (arg0);
10520 code1 = TREE_CODE (arg1);
10521 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10522 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10523 && operand_equal_p (TREE_OPERAND (arg0, 0),
10524 TREE_OPERAND (arg1, 0), 0)
10525 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10526 TYPE_UNSIGNED (rtype))
10527 /* Only create rotates in complete modes. Other cases are not
10528 expanded properly. */
10529 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10531 tree tree01, tree11;
10532 enum tree_code code01, code11;
10534 tree01 = TREE_OPERAND (arg0, 1);
10535 tree11 = TREE_OPERAND (arg1, 1);
10536 STRIP_NOPS (tree01);
10537 STRIP_NOPS (tree11);
10538 code01 = TREE_CODE (tree01);
10539 code11 = TREE_CODE (tree11);
10540 if (code01 == INTEGER_CST
10541 && code11 == INTEGER_CST
10542 && TREE_INT_CST_HIGH (tree01) == 0
10543 && TREE_INT_CST_HIGH (tree11) == 0
10544 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10545 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10547 tem = build2 (LROTATE_EXPR,
10548 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10549 TREE_OPERAND (arg0, 0),
10550 code0 == LSHIFT_EXPR
10551 ? tree01 : tree11);
10552 SET_EXPR_LOCATION (tem, loc);
10553 return fold_convert_loc (loc, type, tem);
10555 else if (code11 == MINUS_EXPR)
10557 tree tree110, tree111;
10558 tree110 = TREE_OPERAND (tree11, 0);
10559 tree111 = TREE_OPERAND (tree11, 1);
10560 STRIP_NOPS (tree110);
10561 STRIP_NOPS (tree111);
10562 if (TREE_CODE (tree110) == INTEGER_CST
10563 && 0 == compare_tree_int (tree110,
10564 TYPE_PRECISION
10565 (TREE_TYPE (TREE_OPERAND
10566 (arg0, 0))))
10567 && operand_equal_p (tree01, tree111, 0))
10568 return
10569 fold_convert_loc (loc, type,
10570 build2 ((code0 == LSHIFT_EXPR
10571 ? LROTATE_EXPR
10572 : RROTATE_EXPR),
10573 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10574 TREE_OPERAND (arg0, 0), tree01));
10576 else if (code01 == MINUS_EXPR)
10578 tree tree010, tree011;
10579 tree010 = TREE_OPERAND (tree01, 0);
10580 tree011 = TREE_OPERAND (tree01, 1);
10581 STRIP_NOPS (tree010);
10582 STRIP_NOPS (tree011);
10583 if (TREE_CODE (tree010) == INTEGER_CST
10584 && 0 == compare_tree_int (tree010,
10585 TYPE_PRECISION
10586 (TREE_TYPE (TREE_OPERAND
10587 (arg0, 0))))
10588 && operand_equal_p (tree11, tree011, 0))
10589 return fold_convert_loc
10590 (loc, type,
10591 build2 ((code0 != LSHIFT_EXPR
10592 ? LROTATE_EXPR
10593 : RROTATE_EXPR),
10594 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10595 TREE_OPERAND (arg0, 0), tree11));
10600 associate:
10601 /* In most languages, can't associate operations on floats through
10602 parentheses. Rather than remember where the parentheses were, we
10603 don't associate floats at all, unless the user has specified
10604 -fassociative-math.
10605 And, we need to make sure type is not saturating. */
10607 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10608 && !TYPE_SATURATING (type))
10610 tree var0, con0, lit0, minus_lit0;
10611 tree var1, con1, lit1, minus_lit1;
10612 bool ok = true;
10614 /* Split both trees into variables, constants, and literals. Then
10615 associate each group together, the constants with literals,
10616 then the result with variables. This increases the chances of
10617 literals being recombined later and of generating relocatable
10618 expressions for the sum of a constant and literal. */
10619 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10620 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10621 code == MINUS_EXPR);
10623 /* With undefined overflow we can only associate constants
10624 with one variable. */
10625 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10626 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10627 && var0 && var1)
10629 tree tmp0 = var0;
10630 tree tmp1 = var1;
10632 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10633 tmp0 = TREE_OPERAND (tmp0, 0);
10634 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10635 tmp1 = TREE_OPERAND (tmp1, 0);
10636 /* The only case we can still associate with two variables
10637 is if they are the same, modulo negation. */
10638 if (!operand_equal_p (tmp0, tmp1, 0))
10639 ok = false;
10642 /* Only do something if we found more than two objects. Otherwise,
10643 nothing has changed and we risk infinite recursion. */
10644 if (ok
10645 && (2 < ((var0 != 0) + (var1 != 0)
10646 + (con0 != 0) + (con1 != 0)
10647 + (lit0 != 0) + (lit1 != 0)
10648 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10650 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10651 if (code == MINUS_EXPR)
10652 code = PLUS_EXPR;
10654 var0 = associate_trees (loc, var0, var1, code, type);
10655 con0 = associate_trees (loc, con0, con1, code, type);
10656 lit0 = associate_trees (loc, lit0, lit1, code, type);
10657 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10659 /* Preserve the MINUS_EXPR if the negative part of the literal is
10660 greater than the positive part. Otherwise, the multiplicative
10661 folding code (i.e extract_muldiv) may be fooled in case
10662 unsigned constants are subtracted, like in the following
10663 example: ((X*2 + 4) - 8U)/2. */
10664 if (minus_lit0 && lit0)
10666 if (TREE_CODE (lit0) == INTEGER_CST
10667 && TREE_CODE (minus_lit0) == INTEGER_CST
10668 && tree_int_cst_lt (lit0, minus_lit0))
10670 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10671 MINUS_EXPR, type);
10672 lit0 = 0;
10674 else
10676 lit0 = associate_trees (loc, lit0, minus_lit0,
10677 MINUS_EXPR, type);
10678 minus_lit0 = 0;
10681 if (minus_lit0)
10683 if (con0 == 0)
10684 return
10685 fold_convert_loc (loc, type,
10686 associate_trees (loc, var0, minus_lit0,
10687 MINUS_EXPR, type));
10688 else
10690 con0 = associate_trees (loc, con0, minus_lit0,
10691 MINUS_EXPR, type);
10692 return
10693 fold_convert_loc (loc, type,
10694 associate_trees (loc, var0, con0,
10695 PLUS_EXPR, type));
10699 con0 = associate_trees (loc, con0, lit0, code, type);
10700 return
10701 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10702 code, type));
10706 return NULL_TREE;
10708 case MINUS_EXPR:
10709 /* Pointer simplifications for subtraction, simple reassociations. */
10710 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10712 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10713 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10714 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10716 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10717 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10718 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10719 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10720 return fold_build2_loc (loc, PLUS_EXPR, type,
10721 fold_build2_loc (loc, MINUS_EXPR, type,
10722 arg00, arg10),
10723 fold_build2_loc (loc, MINUS_EXPR, type,
10724 arg01, arg11));
10726 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10727 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10729 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10730 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10731 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10732 fold_convert_loc (loc, type, arg1));
10733 if (tmp)
10734 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10737 /* A - (-B) -> A + B */
10738 if (TREE_CODE (arg1) == NEGATE_EXPR)
10739 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10740 fold_convert_loc (loc, type,
10741 TREE_OPERAND (arg1, 0)));
10742 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10743 if (TREE_CODE (arg0) == NEGATE_EXPR
10744 && (FLOAT_TYPE_P (type)
10745 || INTEGRAL_TYPE_P (type))
10746 && negate_expr_p (arg1)
10747 && reorder_operands_p (arg0, arg1))
10748 return fold_build2_loc (loc, MINUS_EXPR, type,
10749 fold_convert_loc (loc, type,
10750 negate_expr (arg1)),
10751 fold_convert_loc (loc, type,
10752 TREE_OPERAND (arg0, 0)));
10753 /* Convert -A - 1 to ~A. */
10754 if (INTEGRAL_TYPE_P (type)
10755 && TREE_CODE (arg0) == NEGATE_EXPR
10756 && integer_onep (arg1)
10757 && !TYPE_OVERFLOW_TRAPS (type))
10758 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10759 fold_convert_loc (loc, type,
10760 TREE_OPERAND (arg0, 0)));
10762 /* Convert -1 - A to ~A. */
10763 if (INTEGRAL_TYPE_P (type)
10764 && integer_all_onesp (arg0))
10765 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10768 /* X - (X / CST) * CST is X % CST. */
10769 if (INTEGRAL_TYPE_P (type)
10770 && TREE_CODE (arg1) == MULT_EXPR
10771 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10772 && operand_equal_p (arg0,
10773 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10774 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10775 TREE_OPERAND (arg1, 1), 0))
10776 return
10777 fold_convert_loc (loc, type,
10778 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10779 arg0, TREE_OPERAND (arg1, 1)));
10781 if (! FLOAT_TYPE_P (type))
10783 if (integer_zerop (arg0))
10784 return negate_expr (fold_convert_loc (loc, type, arg1));
10785 if (integer_zerop (arg1))
10786 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10788 /* Fold A - (A & B) into ~B & A. */
10789 if (!TREE_SIDE_EFFECTS (arg0)
10790 && TREE_CODE (arg1) == BIT_AND_EXPR)
10792 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10794 tree arg10 = fold_convert_loc (loc, type,
10795 TREE_OPERAND (arg1, 0));
10796 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10797 fold_build1_loc (loc, BIT_NOT_EXPR,
10798 type, arg10),
10799 fold_convert_loc (loc, type, arg0));
10801 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10803 tree arg11 = fold_convert_loc (loc,
10804 type, TREE_OPERAND (arg1, 1));
10805 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10806 fold_build1_loc (loc, BIT_NOT_EXPR,
10807 type, arg11),
10808 fold_convert_loc (loc, type, arg0));
10812 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10813 any power of 2 minus 1. */
10814 if (TREE_CODE (arg0) == BIT_AND_EXPR
10815 && TREE_CODE (arg1) == BIT_AND_EXPR
10816 && operand_equal_p (TREE_OPERAND (arg0, 0),
10817 TREE_OPERAND (arg1, 0), 0))
10819 tree mask0 = TREE_OPERAND (arg0, 1);
10820 tree mask1 = TREE_OPERAND (arg1, 1);
10821 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10823 if (operand_equal_p (tem, mask1, 0))
10825 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10826 TREE_OPERAND (arg0, 0), mask1);
10827 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10832 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10833 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10834 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10836 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10837 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10838 (-ARG1 + ARG0) reduces to -ARG1. */
10839 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10840 return negate_expr (fold_convert_loc (loc, type, arg1));
10842 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10843 __complex__ ( x, -y ). This is not the same for SNaNs or if
10844 signed zeros are involved. */
10845 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10846 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10847 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10849 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10850 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10851 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10852 bool arg0rz = false, arg0iz = false;
10853 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10854 || (arg0i && (arg0iz = real_zerop (arg0i))))
10856 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10857 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10858 if (arg0rz && arg1i && real_zerop (arg1i))
10860 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10861 arg1r ? arg1r
10862 : build1 (REALPART_EXPR, rtype, arg1));
10863 tree ip = arg0i ? arg0i
10864 : build1 (IMAGPART_EXPR, rtype, arg0);
10865 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10867 else if (arg0iz && arg1r && real_zerop (arg1r))
10869 tree rp = arg0r ? arg0r
10870 : build1 (REALPART_EXPR, rtype, arg0);
10871 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10872 arg1i ? arg1i
10873 : build1 (IMAGPART_EXPR, rtype, arg1));
10874 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10879 /* Fold &x - &x. This can happen from &x.foo - &x.
10880 This is unsafe for certain floats even in non-IEEE formats.
10881 In IEEE, it is unsafe because it does wrong for NaNs.
10882 Also note that operand_equal_p is always false if an operand
10883 is volatile. */
10885 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10886 && operand_equal_p (arg0, arg1, 0))
10887 return fold_convert_loc (loc, type, integer_zero_node);
10889 /* A - B -> A + (-B) if B is easily negatable. */
10890 if (negate_expr_p (arg1)
10891 && ((FLOAT_TYPE_P (type)
10892 /* Avoid this transformation if B is a positive REAL_CST. */
10893 && (TREE_CODE (arg1) != REAL_CST
10894 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10895 || INTEGRAL_TYPE_P (type)))
10896 return fold_build2_loc (loc, PLUS_EXPR, type,
10897 fold_convert_loc (loc, type, arg0),
10898 fold_convert_loc (loc, type,
10899 negate_expr (arg1)));
10901 /* Try folding difference of addresses. */
10903 HOST_WIDE_INT diff;
10905 if ((TREE_CODE (arg0) == ADDR_EXPR
10906 || TREE_CODE (arg1) == ADDR_EXPR)
10907 && ptr_difference_const (arg0, arg1, &diff))
10908 return build_int_cst_type (type, diff);
10911 /* Fold &a[i] - &a[j] to i-j. */
10912 if (TREE_CODE (arg0) == ADDR_EXPR
10913 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10914 && TREE_CODE (arg1) == ADDR_EXPR
10915 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10917 tree aref0 = TREE_OPERAND (arg0, 0);
10918 tree aref1 = TREE_OPERAND (arg1, 0);
10919 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10920 TREE_OPERAND (aref1, 0), 0))
10922 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10923 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10924 tree esz = array_ref_element_size (aref0);
10925 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10926 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10927 fold_convert_loc (loc, type, esz));
10932 if (FLOAT_TYPE_P (type)
10933 && flag_unsafe_math_optimizations
10934 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10935 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10936 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10937 return tem;
10939 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10940 same or one. Make sure type is not saturating.
10941 fold_plusminus_mult_expr will re-associate. */
10942 if ((TREE_CODE (arg0) == MULT_EXPR
10943 || TREE_CODE (arg1) == MULT_EXPR)
10944 && !TYPE_SATURATING (type)
10945 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10947 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10948 if (tem)
10949 return tem;
10952 goto associate;
10954 case MULT_EXPR:
10955 /* (-A) * (-B) -> A * B */
10956 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10957 return fold_build2_loc (loc, MULT_EXPR, type,
10958 fold_convert_loc (loc, type,
10959 TREE_OPERAND (arg0, 0)),
10960 fold_convert_loc (loc, type,
10961 negate_expr (arg1)));
10962 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10963 return fold_build2_loc (loc, MULT_EXPR, type,
10964 fold_convert_loc (loc, type,
10965 negate_expr (arg0)),
10966 fold_convert_loc (loc, type,
10967 TREE_OPERAND (arg1, 0)));
10969 if (! FLOAT_TYPE_P (type))
10971 if (integer_zerop (arg1))
10972 return omit_one_operand_loc (loc, type, arg1, arg0);
10973 if (integer_onep (arg1))
10974 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10975 /* Transform x * -1 into -x. Make sure to do the negation
10976 on the original operand with conversions not stripped
10977 because we can only strip non-sign-changing conversions. */
10978 if (integer_all_onesp (arg1))
10979 return fold_convert_loc (loc, type, negate_expr (op0));
10980 /* Transform x * -C into -x * C if x is easily negatable. */
10981 if (TREE_CODE (arg1) == INTEGER_CST
10982 && tree_int_cst_sgn (arg1) == -1
10983 && negate_expr_p (arg0)
10984 && (tem = negate_expr (arg1)) != arg1
10985 && !TREE_OVERFLOW (tem))
10986 return fold_build2_loc (loc, MULT_EXPR, type,
10987 fold_convert_loc (loc, type,
10988 negate_expr (arg0)),
10989 tem);
10991 /* (a * (1 << b)) is (a << b) */
10992 if (TREE_CODE (arg1) == LSHIFT_EXPR
10993 && integer_onep (TREE_OPERAND (arg1, 0)))
10994 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10995 TREE_OPERAND (arg1, 1));
10996 if (TREE_CODE (arg0) == LSHIFT_EXPR
10997 && integer_onep (TREE_OPERAND (arg0, 0)))
10998 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10999 TREE_OPERAND (arg0, 1));
11001 /* (A + A) * C -> A * 2 * C */
11002 if (TREE_CODE (arg0) == PLUS_EXPR
11003 && TREE_CODE (arg1) == INTEGER_CST
11004 && operand_equal_p (TREE_OPERAND (arg0, 0),
11005 TREE_OPERAND (arg0, 1), 0))
11006 return fold_build2_loc (loc, MULT_EXPR, type,
11007 omit_one_operand_loc (loc, type,
11008 TREE_OPERAND (arg0, 0),
11009 TREE_OPERAND (arg0, 1)),
11010 fold_build2_loc (loc, MULT_EXPR, type,
11011 build_int_cst (type, 2) , arg1));
11013 strict_overflow_p = false;
11014 if (TREE_CODE (arg1) == INTEGER_CST
11015 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11016 &strict_overflow_p)))
11018 if (strict_overflow_p)
11019 fold_overflow_warning (("assuming signed overflow does not "
11020 "occur when simplifying "
11021 "multiplication"),
11022 WARN_STRICT_OVERFLOW_MISC);
11023 return fold_convert_loc (loc, type, tem);
11026 /* Optimize z * conj(z) for integer complex numbers. */
11027 if (TREE_CODE (arg0) == CONJ_EXPR
11028 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11029 return fold_mult_zconjz (loc, type, arg1);
11030 if (TREE_CODE (arg1) == CONJ_EXPR
11031 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11032 return fold_mult_zconjz (loc, type, arg0);
11034 else
11036 /* Maybe fold x * 0 to 0. The expressions aren't the same
11037 when x is NaN, since x * 0 is also NaN. Nor are they the
11038 same in modes with signed zeros, since multiplying a
11039 negative value by 0 gives -0, not +0. */
11040 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11041 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11042 && real_zerop (arg1))
11043 return omit_one_operand_loc (loc, type, arg1, arg0);
11044 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11045 Likewise for complex arithmetic with signed zeros. */
11046 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11047 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11048 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11049 && real_onep (arg1))
11050 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11052 /* Transform x * -1.0 into -x. */
11053 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11054 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11055 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11056 && real_minus_onep (arg1))
11057 return fold_convert_loc (loc, type, negate_expr (arg0));
11059 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11060 the result for floating point types due to rounding so it is applied
11061 only if -fassociative-math was specified. */
11062 if (flag_associative_math
11063 && TREE_CODE (arg0) == RDIV_EXPR
11064 && TREE_CODE (arg1) == REAL_CST
11065 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11067 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11068 arg1, 0);
11069 if (tem)
11070 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11071 TREE_OPERAND (arg0, 1));
11074 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11075 if (operand_equal_p (arg0, arg1, 0))
11077 tree tem = fold_strip_sign_ops (arg0);
11078 if (tem != NULL_TREE)
11080 tem = fold_convert_loc (loc, type, tem);
11081 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11085 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11086 This is not the same for NaNs or if signed zeros are
11087 involved. */
11088 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11089 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11090 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11091 && TREE_CODE (arg1) == COMPLEX_CST
11092 && real_zerop (TREE_REALPART (arg1)))
11094 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11095 if (real_onep (TREE_IMAGPART (arg1)))
11096 return
11097 fold_build2_loc (loc, COMPLEX_EXPR, type,
11098 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11099 rtype, arg0)),
11100 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11101 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11102 return
11103 fold_build2_loc (loc, COMPLEX_EXPR, type,
11104 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11105 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11106 rtype, arg0)));
11109 /* Optimize z * conj(z) for floating point complex numbers.
11110 Guarded by flag_unsafe_math_optimizations as non-finite
11111 imaginary components don't produce scalar results. */
11112 if (flag_unsafe_math_optimizations
11113 && TREE_CODE (arg0) == CONJ_EXPR
11114 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11115 return fold_mult_zconjz (loc, type, arg1);
11116 if (flag_unsafe_math_optimizations
11117 && TREE_CODE (arg1) == CONJ_EXPR
11118 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11119 return fold_mult_zconjz (loc, type, arg0);
11121 if (flag_unsafe_math_optimizations)
11123 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11124 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11126 /* Optimizations of root(...)*root(...). */
11127 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11129 tree rootfn, arg;
11130 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11131 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11133 /* Optimize sqrt(x)*sqrt(x) as x. */
11134 if (BUILTIN_SQRT_P (fcode0)
11135 && operand_equal_p (arg00, arg10, 0)
11136 && ! HONOR_SNANS (TYPE_MODE (type)))
11137 return arg00;
11139 /* Optimize root(x)*root(y) as root(x*y). */
11140 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11141 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11142 return build_call_expr_loc (loc, rootfn, 1, arg);
11145 /* Optimize expN(x)*expN(y) as expN(x+y). */
11146 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11148 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11149 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11150 CALL_EXPR_ARG (arg0, 0),
11151 CALL_EXPR_ARG (arg1, 0));
11152 return build_call_expr_loc (loc, expfn, 1, arg);
11155 /* Optimizations of pow(...)*pow(...). */
11156 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11157 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11158 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11160 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11161 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11162 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11163 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11165 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11166 if (operand_equal_p (arg01, arg11, 0))
11168 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11169 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11170 arg00, arg10);
11171 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11174 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11175 if (operand_equal_p (arg00, arg10, 0))
11177 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11178 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11179 arg01, arg11);
11180 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11184 /* Optimize tan(x)*cos(x) as sin(x). */
11185 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11186 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11187 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11188 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11189 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11190 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11191 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11192 CALL_EXPR_ARG (arg1, 0), 0))
11194 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11196 if (sinfn != NULL_TREE)
11197 return build_call_expr_loc (loc, sinfn, 1,
11198 CALL_EXPR_ARG (arg0, 0));
11201 /* Optimize x*pow(x,c) as pow(x,c+1). */
11202 if (fcode1 == BUILT_IN_POW
11203 || fcode1 == BUILT_IN_POWF
11204 || fcode1 == BUILT_IN_POWL)
11206 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11207 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11208 if (TREE_CODE (arg11) == REAL_CST
11209 && !TREE_OVERFLOW (arg11)
11210 && operand_equal_p (arg0, arg10, 0))
11212 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11213 REAL_VALUE_TYPE c;
11214 tree arg;
11216 c = TREE_REAL_CST (arg11);
11217 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11218 arg = build_real (type, c);
11219 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11223 /* Optimize pow(x,c)*x as pow(x,c+1). */
11224 if (fcode0 == BUILT_IN_POW
11225 || fcode0 == BUILT_IN_POWF
11226 || fcode0 == BUILT_IN_POWL)
11228 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11229 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11230 if (TREE_CODE (arg01) == REAL_CST
11231 && !TREE_OVERFLOW (arg01)
11232 && operand_equal_p (arg1, arg00, 0))
11234 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11235 REAL_VALUE_TYPE c;
11236 tree arg;
11238 c = TREE_REAL_CST (arg01);
11239 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11240 arg = build_real (type, c);
11241 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11245 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
11246 if (optimize_function_for_speed_p (cfun)
11247 && operand_equal_p (arg0, arg1, 0))
11249 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11251 if (powfn)
11253 tree arg = build_real (type, dconst2);
11254 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11259 goto associate;
11261 case BIT_IOR_EXPR:
11262 bit_ior:
11263 if (integer_all_onesp (arg1))
11264 return omit_one_operand_loc (loc, type, arg1, arg0);
11265 if (integer_zerop (arg1))
11266 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11267 if (operand_equal_p (arg0, arg1, 0))
11268 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11270 /* ~X | X is -1. */
11271 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11272 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11274 t1 = fold_convert_loc (loc, type, integer_zero_node);
11275 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11276 return omit_one_operand_loc (loc, type, t1, arg1);
11279 /* X | ~X is -1. */
11280 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11281 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11283 t1 = fold_convert_loc (loc, type, integer_zero_node);
11284 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11285 return omit_one_operand_loc (loc, type, t1, arg0);
11288 /* Canonicalize (X & C1) | C2. */
11289 if (TREE_CODE (arg0) == BIT_AND_EXPR
11290 && TREE_CODE (arg1) == INTEGER_CST
11291 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11293 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
11294 int width = TYPE_PRECISION (type), w;
11295 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
11296 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11297 hi2 = TREE_INT_CST_HIGH (arg1);
11298 lo2 = TREE_INT_CST_LOW (arg1);
11300 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11301 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
11302 return omit_one_operand_loc (loc, type, arg1,
11303 TREE_OPERAND (arg0, 0));
11305 if (width > HOST_BITS_PER_WIDE_INT)
11307 mhi = (unsigned HOST_WIDE_INT) -1
11308 >> (2 * HOST_BITS_PER_WIDE_INT - width);
11309 mlo = -1;
11311 else
11313 mhi = 0;
11314 mlo = (unsigned HOST_WIDE_INT) -1
11315 >> (HOST_BITS_PER_WIDE_INT - width);
11318 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11319 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
11320 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11321 TREE_OPERAND (arg0, 0), arg1);
11323 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11324 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11325 mode which allows further optimizations. */
11326 hi1 &= mhi;
11327 lo1 &= mlo;
11328 hi2 &= mhi;
11329 lo2 &= mlo;
11330 hi3 = hi1 & ~hi2;
11331 lo3 = lo1 & ~lo2;
11332 for (w = BITS_PER_UNIT;
11333 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11334 w <<= 1)
11336 unsigned HOST_WIDE_INT mask
11337 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11338 if (((lo1 | lo2) & mask) == mask
11339 && (lo1 & ~mask) == 0 && hi1 == 0)
11341 hi3 = 0;
11342 lo3 = mask;
11343 break;
11346 if (hi3 != hi1 || lo3 != lo1)
11347 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11348 fold_build2_loc (loc, BIT_AND_EXPR, type,
11349 TREE_OPERAND (arg0, 0),
11350 build_int_cst_wide (type,
11351 lo3, hi3)),
11352 arg1);
11355 /* (X & Y) | Y is (X, Y). */
11356 if (TREE_CODE (arg0) == BIT_AND_EXPR
11357 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11358 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11359 /* (X & Y) | X is (Y, X). */
11360 if (TREE_CODE (arg0) == BIT_AND_EXPR
11361 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11362 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11363 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11364 /* X | (X & Y) is (Y, X). */
11365 if (TREE_CODE (arg1) == BIT_AND_EXPR
11366 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11367 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11368 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11369 /* X | (Y & X) is (Y, X). */
11370 if (TREE_CODE (arg1) == BIT_AND_EXPR
11371 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11372 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11373 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11375 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11376 if (t1 != NULL_TREE)
11377 return t1;
11379 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11381 This results in more efficient code for machines without a NAND
11382 instruction. Combine will canonicalize to the first form
11383 which will allow use of NAND instructions provided by the
11384 backend if they exist. */
11385 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11386 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11388 return
11389 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11390 build2 (BIT_AND_EXPR, type,
11391 fold_convert_loc (loc, type,
11392 TREE_OPERAND (arg0, 0)),
11393 fold_convert_loc (loc, type,
11394 TREE_OPERAND (arg1, 0))));
11397 /* See if this can be simplified into a rotate first. If that
11398 is unsuccessful continue in the association code. */
11399 goto bit_rotate;
11401 case BIT_XOR_EXPR:
11402 if (integer_zerop (arg1))
11403 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11404 if (integer_all_onesp (arg1))
11405 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11406 if (operand_equal_p (arg0, arg1, 0))
11407 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11409 /* ~X ^ X is -1. */
11410 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11411 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11413 t1 = fold_convert_loc (loc, type, integer_zero_node);
11414 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11415 return omit_one_operand_loc (loc, type, t1, arg1);
11418 /* X ^ ~X is -1. */
11419 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11420 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11422 t1 = fold_convert_loc (loc, type, integer_zero_node);
11423 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11424 return omit_one_operand_loc (loc, type, t1, arg0);
11427 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11428 with a constant, and the two constants have no bits in common,
11429 we should treat this as a BIT_IOR_EXPR since this may produce more
11430 simplifications. */
11431 if (TREE_CODE (arg0) == BIT_AND_EXPR
11432 && TREE_CODE (arg1) == BIT_AND_EXPR
11433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11434 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11435 && integer_zerop (const_binop (BIT_AND_EXPR,
11436 TREE_OPERAND (arg0, 1),
11437 TREE_OPERAND (arg1, 1), 0)))
11439 code = BIT_IOR_EXPR;
11440 goto bit_ior;
11443 /* (X | Y) ^ X -> Y & ~ X*/
11444 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11445 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11447 tree t2 = TREE_OPERAND (arg0, 1);
11448 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11449 arg1);
11450 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11451 fold_convert_loc (loc, type, t2),
11452 fold_convert_loc (loc, type, t1));
11453 return t1;
11456 /* (Y | X) ^ X -> Y & ~ X*/
11457 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11458 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11460 tree t2 = TREE_OPERAND (arg0, 0);
11461 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11462 arg1);
11463 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11464 fold_convert_loc (loc, type, t2),
11465 fold_convert_loc (loc, type, t1));
11466 return t1;
11469 /* X ^ (X | Y) -> Y & ~ X*/
11470 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11471 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11473 tree t2 = TREE_OPERAND (arg1, 1);
11474 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11475 arg0);
11476 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11477 fold_convert_loc (loc, type, t2),
11478 fold_convert_loc (loc, type, t1));
11479 return t1;
11482 /* X ^ (Y | X) -> Y & ~ X*/
11483 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11484 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11486 tree t2 = TREE_OPERAND (arg1, 0);
11487 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11488 arg0);
11489 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11490 fold_convert_loc (loc, type, t2),
11491 fold_convert_loc (loc, type, t1));
11492 return t1;
11495 /* Convert ~X ^ ~Y to X ^ Y. */
11496 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11497 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11498 return fold_build2_loc (loc, code, type,
11499 fold_convert_loc (loc, type,
11500 TREE_OPERAND (arg0, 0)),
11501 fold_convert_loc (loc, type,
11502 TREE_OPERAND (arg1, 0)));
11504 /* Convert ~X ^ C to X ^ ~C. */
11505 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11506 && TREE_CODE (arg1) == INTEGER_CST)
11507 return fold_build2_loc (loc, code, type,
11508 fold_convert_loc (loc, type,
11509 TREE_OPERAND (arg0, 0)),
11510 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11512 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11513 if (TREE_CODE (arg0) == BIT_AND_EXPR
11514 && integer_onep (TREE_OPERAND (arg0, 1))
11515 && integer_onep (arg1))
11516 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11517 build_int_cst (TREE_TYPE (arg0), 0));
11519 /* Fold (X & Y) ^ Y as ~X & Y. */
11520 if (TREE_CODE (arg0) == BIT_AND_EXPR
11521 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11523 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11524 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11525 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11526 fold_convert_loc (loc, type, arg1));
11528 /* Fold (X & Y) ^ X as ~Y & X. */
11529 if (TREE_CODE (arg0) == BIT_AND_EXPR
11530 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11531 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11533 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11534 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11535 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11536 fold_convert_loc (loc, type, arg1));
11538 /* Fold X ^ (X & Y) as X & ~Y. */
11539 if (TREE_CODE (arg1) == BIT_AND_EXPR
11540 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11542 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11543 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11544 fold_convert_loc (loc, type, arg0),
11545 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11547 /* Fold X ^ (Y & X) as ~Y & X. */
11548 if (TREE_CODE (arg1) == BIT_AND_EXPR
11549 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11550 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11552 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11553 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11554 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11555 fold_convert_loc (loc, type, arg0));
11558 /* See if this can be simplified into a rotate first. If that
11559 is unsuccessful continue in the association code. */
11560 goto bit_rotate;
11562 case BIT_AND_EXPR:
11563 if (integer_all_onesp (arg1))
11564 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11565 if (integer_zerop (arg1))
11566 return omit_one_operand_loc (loc, type, arg1, arg0);
11567 if (operand_equal_p (arg0, arg1, 0))
11568 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11570 /* ~X & X is always zero. */
11571 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11572 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11573 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11575 /* X & ~X is always zero. */
11576 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11577 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11578 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11580 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11581 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11582 && TREE_CODE (arg1) == INTEGER_CST
11583 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11585 tree tmp1 = fold_convert_loc (loc, type, arg1);
11586 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11587 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11588 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11589 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11590 return
11591 fold_convert_loc (loc, type,
11592 fold_build2_loc (loc, BIT_IOR_EXPR,
11593 type, tmp2, tmp3));
11596 /* (X | Y) & Y is (X, Y). */
11597 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11598 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11599 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11600 /* (X | Y) & X is (Y, X). */
11601 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11603 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11604 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11605 /* X & (X | Y) is (Y, X). */
11606 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11607 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11608 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11609 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11610 /* X & (Y | X) is (Y, X). */
11611 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11612 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11613 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11614 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11616 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11617 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11618 && integer_onep (TREE_OPERAND (arg0, 1))
11619 && integer_onep (arg1))
11621 tem = TREE_OPERAND (arg0, 0);
11622 return fold_build2_loc (loc, EQ_EXPR, type,
11623 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11624 build_int_cst (TREE_TYPE (tem), 1)),
11625 build_int_cst (TREE_TYPE (tem), 0));
11627 /* Fold ~X & 1 as (X & 1) == 0. */
11628 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11629 && integer_onep (arg1))
11631 tem = TREE_OPERAND (arg0, 0);
11632 return fold_build2_loc (loc, EQ_EXPR, type,
11633 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11634 build_int_cst (TREE_TYPE (tem), 1)),
11635 build_int_cst (TREE_TYPE (tem), 0));
11638 /* Fold (X ^ Y) & Y as ~X & Y. */
11639 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11640 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11642 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11643 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11644 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11645 fold_convert_loc (loc, type, arg1));
11647 /* Fold (X ^ Y) & X as ~Y & X. */
11648 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11649 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11650 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11652 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11653 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11654 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11655 fold_convert_loc (loc, type, arg1));
11657 /* Fold X & (X ^ Y) as X & ~Y. */
11658 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11659 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11661 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11662 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11663 fold_convert_loc (loc, type, arg0),
11664 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11666 /* Fold X & (Y ^ X) as ~Y & X. */
11667 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11668 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11669 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11671 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11672 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11673 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11674 fold_convert_loc (loc, type, arg0));
11677 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11678 if (t1 != NULL_TREE)
11679 return t1;
11680 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11681 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11682 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11684 unsigned int prec
11685 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11687 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11688 && (~TREE_INT_CST_LOW (arg1)
11689 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11690 return
11691 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11694 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11696 This results in more efficient code for machines without a NOR
11697 instruction. Combine will canonicalize to the first form
11698 which will allow use of NOR instructions provided by the
11699 backend if they exist. */
11700 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11701 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11703 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11704 build2 (BIT_IOR_EXPR, type,
11705 fold_convert_loc (loc, type,
11706 TREE_OPERAND (arg0, 0)),
11707 fold_convert_loc (loc, type,
11708 TREE_OPERAND (arg1, 0))));
11711 /* If arg0 is derived from the address of an object or function, we may
11712 be able to fold this expression using the object or function's
11713 alignment. */
11714 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11716 unsigned HOST_WIDE_INT modulus, residue;
11717 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11719 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11720 integer_onep (arg1));
11722 /* This works because modulus is a power of 2. If this weren't the
11723 case, we'd have to replace it by its greatest power-of-2
11724 divisor: modulus & -modulus. */
11725 if (low < modulus)
11726 return build_int_cst (type, residue & low);
11729 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11730 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11731 if the new mask might be further optimized. */
11732 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11733 || TREE_CODE (arg0) == RSHIFT_EXPR)
11734 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11735 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11736 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11737 < TYPE_PRECISION (TREE_TYPE (arg0))
11738 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11739 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11741 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11742 unsigned HOST_WIDE_INT mask
11743 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11744 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11745 tree shift_type = TREE_TYPE (arg0);
11747 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11748 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11749 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11750 && TYPE_PRECISION (TREE_TYPE (arg0))
11751 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11753 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11754 tree arg00 = TREE_OPERAND (arg0, 0);
11755 /* See if more bits can be proven as zero because of
11756 zero extension. */
11757 if (TREE_CODE (arg00) == NOP_EXPR
11758 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11760 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11761 if (TYPE_PRECISION (inner_type)
11762 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11763 && TYPE_PRECISION (inner_type) < prec)
11765 prec = TYPE_PRECISION (inner_type);
11766 /* See if we can shorten the right shift. */
11767 if (shiftc < prec)
11768 shift_type = inner_type;
11771 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11772 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11773 zerobits <<= prec - shiftc;
11774 /* For arithmetic shift if sign bit could be set, zerobits
11775 can contain actually sign bits, so no transformation is
11776 possible, unless MASK masks them all away. In that
11777 case the shift needs to be converted into logical shift. */
11778 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11779 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11781 if ((mask & zerobits) == 0)
11782 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11783 else
11784 zerobits = 0;
11788 /* ((X << 16) & 0xff00) is (X, 0). */
11789 if ((mask & zerobits) == mask)
11790 return omit_one_operand_loc (loc, type,
11791 build_int_cst (type, 0), arg0);
11793 newmask = mask | zerobits;
11794 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11796 unsigned int prec;
11798 /* Only do the transformation if NEWMASK is some integer
11799 mode's mask. */
11800 for (prec = BITS_PER_UNIT;
11801 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11802 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11803 break;
11804 if (prec < HOST_BITS_PER_WIDE_INT
11805 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11807 tree newmaskt;
11809 if (shift_type != TREE_TYPE (arg0))
11811 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11812 fold_convert_loc (loc, shift_type,
11813 TREE_OPERAND (arg0, 0)),
11814 TREE_OPERAND (arg0, 1));
11815 tem = fold_convert_loc (loc, type, tem);
11817 else
11818 tem = op0;
11819 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11820 if (!tree_int_cst_equal (newmaskt, arg1))
11821 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11826 goto associate;
11828 case RDIV_EXPR:
11829 /* Don't touch a floating-point divide by zero unless the mode
11830 of the constant can represent infinity. */
11831 if (TREE_CODE (arg1) == REAL_CST
11832 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11833 && real_zerop (arg1))
11834 return NULL_TREE;
11836 /* Optimize A / A to 1.0 if we don't care about
11837 NaNs or Infinities. Skip the transformation
11838 for non-real operands. */
11839 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11840 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11841 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11842 && operand_equal_p (arg0, arg1, 0))
11844 tree r = build_real (TREE_TYPE (arg0), dconst1);
11846 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11849 /* The complex version of the above A / A optimization. */
11850 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11851 && operand_equal_p (arg0, arg1, 0))
11853 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11854 if (! HONOR_NANS (TYPE_MODE (elem_type))
11855 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11857 tree r = build_real (elem_type, dconst1);
11858 /* omit_two_operands will call fold_convert for us. */
11859 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11863 /* (-A) / (-B) -> A / B */
11864 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11865 return fold_build2_loc (loc, RDIV_EXPR, type,
11866 TREE_OPERAND (arg0, 0),
11867 negate_expr (arg1));
11868 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11869 return fold_build2_loc (loc, RDIV_EXPR, type,
11870 negate_expr (arg0),
11871 TREE_OPERAND (arg1, 0));
11873 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11874 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11875 && real_onep (arg1))
11876 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11878 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11879 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11880 && real_minus_onep (arg1))
11881 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11882 negate_expr (arg0)));
11884 /* If ARG1 is a constant, we can convert this to a multiply by the
11885 reciprocal. This does not have the same rounding properties,
11886 so only do this if -freciprocal-math. We can actually
11887 always safely do it if ARG1 is a power of two, but it's hard to
11888 tell if it is or not in a portable manner. */
11889 if (TREE_CODE (arg1) == REAL_CST)
11891 if (flag_reciprocal_math
11892 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11893 arg1, 0)))
11894 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11895 /* Find the reciprocal if optimizing and the result is exact. */
11896 if (optimize)
11898 REAL_VALUE_TYPE r;
11899 r = TREE_REAL_CST (arg1);
11900 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11902 tem = build_real (type, r);
11903 return fold_build2_loc (loc, MULT_EXPR, type,
11904 fold_convert_loc (loc, type, arg0), tem);
11908 /* Convert A/B/C to A/(B*C). */
11909 if (flag_reciprocal_math
11910 && TREE_CODE (arg0) == RDIV_EXPR)
11911 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11912 fold_build2_loc (loc, MULT_EXPR, type,
11913 TREE_OPERAND (arg0, 1), arg1));
11915 /* Convert A/(B/C) to (A/B)*C. */
11916 if (flag_reciprocal_math
11917 && TREE_CODE (arg1) == RDIV_EXPR)
11918 return fold_build2_loc (loc, MULT_EXPR, type,
11919 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11920 TREE_OPERAND (arg1, 0)),
11921 TREE_OPERAND (arg1, 1));
11923 /* Convert C1/(X*C2) into (C1/C2)/X. */
11924 if (flag_reciprocal_math
11925 && TREE_CODE (arg1) == MULT_EXPR
11926 && TREE_CODE (arg0) == REAL_CST
11927 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11929 tree tem = const_binop (RDIV_EXPR, arg0,
11930 TREE_OPERAND (arg1, 1), 0);
11931 if (tem)
11932 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11933 TREE_OPERAND (arg1, 0));
11936 if (flag_unsafe_math_optimizations)
11938 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11939 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11941 /* Optimize sin(x)/cos(x) as tan(x). */
11942 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11943 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11944 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11945 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11946 CALL_EXPR_ARG (arg1, 0), 0))
11948 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11950 if (tanfn != NULL_TREE)
11951 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11954 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11955 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11956 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11957 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11958 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11959 CALL_EXPR_ARG (arg1, 0), 0))
11961 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11963 if (tanfn != NULL_TREE)
11965 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11966 CALL_EXPR_ARG (arg0, 0));
11967 return fold_build2_loc (loc, RDIV_EXPR, type,
11968 build_real (type, dconst1), tmp);
11972 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11973 NaNs or Infinities. */
11974 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11975 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11976 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11978 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11979 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11981 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11982 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11983 && operand_equal_p (arg00, arg01, 0))
11985 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11987 if (cosfn != NULL_TREE)
11988 return build_call_expr_loc (loc, cosfn, 1, arg00);
11992 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11993 NaNs or Infinities. */
11994 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11995 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11996 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11998 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11999 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12001 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12002 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12003 && operand_equal_p (arg00, arg01, 0))
12005 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12007 if (cosfn != NULL_TREE)
12009 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12010 return fold_build2_loc (loc, RDIV_EXPR, type,
12011 build_real (type, dconst1),
12012 tmp);
12017 /* Optimize pow(x,c)/x as pow(x,c-1). */
12018 if (fcode0 == BUILT_IN_POW
12019 || fcode0 == BUILT_IN_POWF
12020 || fcode0 == BUILT_IN_POWL)
12022 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12023 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12024 if (TREE_CODE (arg01) == REAL_CST
12025 && !TREE_OVERFLOW (arg01)
12026 && operand_equal_p (arg1, arg00, 0))
12028 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12029 REAL_VALUE_TYPE c;
12030 tree arg;
12032 c = TREE_REAL_CST (arg01);
12033 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12034 arg = build_real (type, c);
12035 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12039 /* Optimize a/root(b/c) into a*root(c/b). */
12040 if (BUILTIN_ROOT_P (fcode1))
12042 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12044 if (TREE_CODE (rootarg) == RDIV_EXPR)
12046 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12047 tree b = TREE_OPERAND (rootarg, 0);
12048 tree c = TREE_OPERAND (rootarg, 1);
12050 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12052 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12053 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12057 /* Optimize x/expN(y) into x*expN(-y). */
12058 if (BUILTIN_EXPONENT_P (fcode1))
12060 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12061 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12062 arg1 = build_call_expr_loc (loc,
12063 expfn, 1,
12064 fold_convert_loc (loc, type, arg));
12065 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12068 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12069 if (fcode1 == BUILT_IN_POW
12070 || fcode1 == BUILT_IN_POWF
12071 || fcode1 == BUILT_IN_POWL)
12073 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12074 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12075 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12076 tree neg11 = fold_convert_loc (loc, type,
12077 negate_expr (arg11));
12078 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12079 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12082 return NULL_TREE;
12084 case TRUNC_DIV_EXPR:
12085 case FLOOR_DIV_EXPR:
12086 /* Simplify A / (B << N) where A and B are positive and B is
12087 a power of 2, to A >> (N + log2(B)). */
12088 strict_overflow_p = false;
12089 if (TREE_CODE (arg1) == LSHIFT_EXPR
12090 && (TYPE_UNSIGNED (type)
12091 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12093 tree sval = TREE_OPERAND (arg1, 0);
12094 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12096 tree sh_cnt = TREE_OPERAND (arg1, 1);
12097 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12099 if (strict_overflow_p)
12100 fold_overflow_warning (("assuming signed overflow does not "
12101 "occur when simplifying A / (B << N)"),
12102 WARN_STRICT_OVERFLOW_MISC);
12104 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12105 sh_cnt, build_int_cst (NULL_TREE, pow2));
12106 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12107 fold_convert_loc (loc, type, arg0), sh_cnt);
12111 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12112 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12113 if (INTEGRAL_TYPE_P (type)
12114 && TYPE_UNSIGNED (type)
12115 && code == FLOOR_DIV_EXPR)
12116 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12118 /* Fall thru */
12120 case ROUND_DIV_EXPR:
12121 case CEIL_DIV_EXPR:
12122 case EXACT_DIV_EXPR:
12123 if (integer_onep (arg1))
12124 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12125 if (integer_zerop (arg1))
12126 return NULL_TREE;
12127 /* X / -1 is -X. */
12128 if (!TYPE_UNSIGNED (type)
12129 && TREE_CODE (arg1) == INTEGER_CST
12130 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12131 && TREE_INT_CST_HIGH (arg1) == -1)
12132 return fold_convert_loc (loc, type, negate_expr (arg0));
12134 /* Convert -A / -B to A / B when the type is signed and overflow is
12135 undefined. */
12136 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12137 && TREE_CODE (arg0) == NEGATE_EXPR
12138 && negate_expr_p (arg1))
12140 if (INTEGRAL_TYPE_P (type))
12141 fold_overflow_warning (("assuming signed overflow does not occur "
12142 "when distributing negation across "
12143 "division"),
12144 WARN_STRICT_OVERFLOW_MISC);
12145 return fold_build2_loc (loc, code, type,
12146 fold_convert_loc (loc, type,
12147 TREE_OPERAND (arg0, 0)),
12148 fold_convert_loc (loc, type,
12149 negate_expr (arg1)));
12151 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12152 && TREE_CODE (arg1) == NEGATE_EXPR
12153 && negate_expr_p (arg0))
12155 if (INTEGRAL_TYPE_P (type))
12156 fold_overflow_warning (("assuming signed overflow does not occur "
12157 "when distributing negation across "
12158 "division"),
12159 WARN_STRICT_OVERFLOW_MISC);
12160 return fold_build2_loc (loc, code, type,
12161 fold_convert_loc (loc, type,
12162 negate_expr (arg0)),
12163 fold_convert_loc (loc, type,
12164 TREE_OPERAND (arg1, 0)));
12167 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12168 operation, EXACT_DIV_EXPR.
12170 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12171 At one time others generated faster code, it's not clear if they do
12172 after the last round to changes to the DIV code in expmed.c. */
12173 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12174 && multiple_of_p (type, arg0, arg1))
12175 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12177 strict_overflow_p = false;
12178 if (TREE_CODE (arg1) == INTEGER_CST
12179 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12180 &strict_overflow_p)))
12182 if (strict_overflow_p)
12183 fold_overflow_warning (("assuming signed overflow does not occur "
12184 "when simplifying division"),
12185 WARN_STRICT_OVERFLOW_MISC);
12186 return fold_convert_loc (loc, type, tem);
12189 return NULL_TREE;
12191 case CEIL_MOD_EXPR:
12192 case FLOOR_MOD_EXPR:
12193 case ROUND_MOD_EXPR:
12194 case TRUNC_MOD_EXPR:
12195 /* X % 1 is always zero, but be sure to preserve any side
12196 effects in X. */
12197 if (integer_onep (arg1))
12198 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12200 /* X % 0, return X % 0 unchanged so that we can get the
12201 proper warnings and errors. */
12202 if (integer_zerop (arg1))
12203 return NULL_TREE;
12205 /* 0 % X is always zero, but be sure to preserve any side
12206 effects in X. Place this after checking for X == 0. */
12207 if (integer_zerop (arg0))
12208 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12210 /* X % -1 is zero. */
12211 if (!TYPE_UNSIGNED (type)
12212 && TREE_CODE (arg1) == INTEGER_CST
12213 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12214 && TREE_INT_CST_HIGH (arg1) == -1)
12215 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12217 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12218 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12219 strict_overflow_p = false;
12220 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12221 && (TYPE_UNSIGNED (type)
12222 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12224 tree c = arg1;
12225 /* Also optimize A % (C << N) where C is a power of 2,
12226 to A & ((C << N) - 1). */
12227 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12228 c = TREE_OPERAND (arg1, 0);
12230 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12232 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12233 build_int_cst (TREE_TYPE (arg1), 1));
12234 if (strict_overflow_p)
12235 fold_overflow_warning (("assuming signed overflow does not "
12236 "occur when simplifying "
12237 "X % (power of two)"),
12238 WARN_STRICT_OVERFLOW_MISC);
12239 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12240 fold_convert_loc (loc, type, arg0),
12241 fold_convert_loc (loc, type, mask));
12245 /* X % -C is the same as X % C. */
12246 if (code == TRUNC_MOD_EXPR
12247 && !TYPE_UNSIGNED (type)
12248 && TREE_CODE (arg1) == INTEGER_CST
12249 && !TREE_OVERFLOW (arg1)
12250 && TREE_INT_CST_HIGH (arg1) < 0
12251 && !TYPE_OVERFLOW_TRAPS (type)
12252 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12253 && !sign_bit_p (arg1, arg1))
12254 return fold_build2_loc (loc, code, type,
12255 fold_convert_loc (loc, type, arg0),
12256 fold_convert_loc (loc, type,
12257 negate_expr (arg1)));
12259 /* X % -Y is the same as X % Y. */
12260 if (code == TRUNC_MOD_EXPR
12261 && !TYPE_UNSIGNED (type)
12262 && TREE_CODE (arg1) == NEGATE_EXPR
12263 && !TYPE_OVERFLOW_TRAPS (type))
12264 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12265 fold_convert_loc (loc, type,
12266 TREE_OPERAND (arg1, 0)));
12268 if (TREE_CODE (arg1) == INTEGER_CST
12269 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12270 &strict_overflow_p)))
12272 if (strict_overflow_p)
12273 fold_overflow_warning (("assuming signed overflow does not occur "
12274 "when simplifying modulus"),
12275 WARN_STRICT_OVERFLOW_MISC);
12276 return fold_convert_loc (loc, type, tem);
12279 return NULL_TREE;
12281 case LROTATE_EXPR:
12282 case RROTATE_EXPR:
12283 if (integer_all_onesp (arg0))
12284 return omit_one_operand_loc (loc, type, arg0, arg1);
12285 goto shift;
12287 case RSHIFT_EXPR:
12288 /* Optimize -1 >> x for arithmetic right shifts. */
12289 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12290 && tree_expr_nonnegative_p (arg1))
12291 return omit_one_operand_loc (loc, type, arg0, arg1);
12292 /* ... fall through ... */
12294 case LSHIFT_EXPR:
12295 shift:
12296 if (integer_zerop (arg1))
12297 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12298 if (integer_zerop (arg0))
12299 return omit_one_operand_loc (loc, type, arg0, arg1);
12301 /* Since negative shift count is not well-defined,
12302 don't try to compute it in the compiler. */
12303 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12304 return NULL_TREE;
12306 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12307 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12308 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12309 && host_integerp (TREE_OPERAND (arg0, 1), false)
12310 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12312 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12313 + TREE_INT_CST_LOW (arg1));
12315 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12316 being well defined. */
12317 if (low >= TYPE_PRECISION (type))
12319 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12320 low = low % TYPE_PRECISION (type);
12321 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12322 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12323 TREE_OPERAND (arg0, 0));
12324 else
12325 low = TYPE_PRECISION (type) - 1;
12328 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12329 build_int_cst (type, low));
12332 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12333 into x & ((unsigned)-1 >> c) for unsigned types. */
12334 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12335 || (TYPE_UNSIGNED (type)
12336 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12337 && host_integerp (arg1, false)
12338 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12339 && host_integerp (TREE_OPERAND (arg0, 1), false)
12340 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12342 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12343 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12344 tree lshift;
12345 tree arg00;
12347 if (low0 == low1)
12349 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12351 lshift = build_int_cst (type, -1);
12352 lshift = int_const_binop (code, lshift, arg1, 0);
12354 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12358 /* Rewrite an LROTATE_EXPR by a constant into an
12359 RROTATE_EXPR by a new constant. */
12360 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12362 tree tem = build_int_cst (TREE_TYPE (arg1),
12363 TYPE_PRECISION (type));
12364 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12365 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12368 /* If we have a rotate of a bit operation with the rotate count and
12369 the second operand of the bit operation both constant,
12370 permute the two operations. */
12371 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12372 && (TREE_CODE (arg0) == BIT_AND_EXPR
12373 || TREE_CODE (arg0) == BIT_IOR_EXPR
12374 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12375 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12376 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12377 fold_build2_loc (loc, code, type,
12378 TREE_OPERAND (arg0, 0), arg1),
12379 fold_build2_loc (loc, code, type,
12380 TREE_OPERAND (arg0, 1), arg1));
12382 /* Two consecutive rotates adding up to the precision of the
12383 type can be ignored. */
12384 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12385 && TREE_CODE (arg0) == RROTATE_EXPR
12386 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12387 && TREE_INT_CST_HIGH (arg1) == 0
12388 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12389 && ((TREE_INT_CST_LOW (arg1)
12390 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12391 == (unsigned int) TYPE_PRECISION (type)))
12392 return TREE_OPERAND (arg0, 0);
12394 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12395 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12396 if the latter can be further optimized. */
12397 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12398 && TREE_CODE (arg0) == BIT_AND_EXPR
12399 && TREE_CODE (arg1) == INTEGER_CST
12400 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12402 tree mask = fold_build2_loc (loc, code, type,
12403 fold_convert_loc (loc, type,
12404 TREE_OPERAND (arg0, 1)),
12405 arg1);
12406 tree shift = fold_build2_loc (loc, code, type,
12407 fold_convert_loc (loc, type,
12408 TREE_OPERAND (arg0, 0)),
12409 arg1);
12410 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12411 if (tem)
12412 return tem;
12415 return NULL_TREE;
12417 case MIN_EXPR:
12418 if (operand_equal_p (arg0, arg1, 0))
12419 return omit_one_operand_loc (loc, type, arg0, arg1);
12420 if (INTEGRAL_TYPE_P (type)
12421 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12422 return omit_one_operand_loc (loc, type, arg1, arg0);
12423 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12424 if (tem)
12425 return tem;
12426 goto associate;
12428 case MAX_EXPR:
12429 if (operand_equal_p (arg0, arg1, 0))
12430 return omit_one_operand_loc (loc, type, arg0, arg1);
12431 if (INTEGRAL_TYPE_P (type)
12432 && TYPE_MAX_VALUE (type)
12433 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12434 return omit_one_operand_loc (loc, type, arg1, arg0);
12435 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12436 if (tem)
12437 return tem;
12438 goto associate;
12440 case TRUTH_ANDIF_EXPR:
12441 /* Note that the operands of this must be ints
12442 and their values must be 0 or 1.
12443 ("true" is a fixed value perhaps depending on the language.) */
12444 /* If first arg is constant zero, return it. */
12445 if (integer_zerop (arg0))
12446 return fold_convert_loc (loc, type, arg0);
12447 case TRUTH_AND_EXPR:
12448 /* If either arg is constant true, drop it. */
12449 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12450 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12451 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12452 /* Preserve sequence points. */
12453 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12454 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12455 /* If second arg is constant zero, result is zero, but first arg
12456 must be evaluated. */
12457 if (integer_zerop (arg1))
12458 return omit_one_operand_loc (loc, type, arg1, arg0);
12459 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12460 case will be handled here. */
12461 if (integer_zerop (arg0))
12462 return omit_one_operand_loc (loc, type, arg0, arg1);
12464 /* !X && X is always false. */
12465 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12466 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12467 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12468 /* X && !X is always false. */
12469 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12470 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12471 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12473 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12474 means A >= Y && A != MAX, but in this case we know that
12475 A < X <= MAX. */
12477 if (!TREE_SIDE_EFFECTS (arg0)
12478 && !TREE_SIDE_EFFECTS (arg1))
12480 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12481 if (tem && !operand_equal_p (tem, arg0, 0))
12482 return fold_build2_loc (loc, code, type, tem, arg1);
12484 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12485 if (tem && !operand_equal_p (tem, arg1, 0))
12486 return fold_build2_loc (loc, code, type, arg0, tem);
12489 truth_andor:
12490 /* We only do these simplifications if we are optimizing. */
12491 if (!optimize)
12492 return NULL_TREE;
12494 /* Check for things like (A || B) && (A || C). We can convert this
12495 to A || (B && C). Note that either operator can be any of the four
12496 truth and/or operations and the transformation will still be
12497 valid. Also note that we only care about order for the
12498 ANDIF and ORIF operators. If B contains side effects, this
12499 might change the truth-value of A. */
12500 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12501 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12502 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12503 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12504 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12505 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12507 tree a00 = TREE_OPERAND (arg0, 0);
12508 tree a01 = TREE_OPERAND (arg0, 1);
12509 tree a10 = TREE_OPERAND (arg1, 0);
12510 tree a11 = TREE_OPERAND (arg1, 1);
12511 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12512 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12513 && (code == TRUTH_AND_EXPR
12514 || code == TRUTH_OR_EXPR));
12516 if (operand_equal_p (a00, a10, 0))
12517 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12518 fold_build2_loc (loc, code, type, a01, a11));
12519 else if (commutative && operand_equal_p (a00, a11, 0))
12520 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12521 fold_build2_loc (loc, code, type, a01, a10));
12522 else if (commutative && operand_equal_p (a01, a10, 0))
12523 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12524 fold_build2_loc (loc, code, type, a00, a11));
12526 /* This case if tricky because we must either have commutative
12527 operators or else A10 must not have side-effects. */
12529 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12530 && operand_equal_p (a01, a11, 0))
12531 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12532 fold_build2_loc (loc, code, type, a00, a10),
12533 a01);
12536 /* See if we can build a range comparison. */
12537 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12538 return tem;
12540 /* Check for the possibility of merging component references. If our
12541 lhs is another similar operation, try to merge its rhs with our
12542 rhs. Then try to merge our lhs and rhs. */
12543 if (TREE_CODE (arg0) == code
12544 && 0 != (tem = fold_truthop (loc, code, type,
12545 TREE_OPERAND (arg0, 1), arg1)))
12546 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12548 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12549 return tem;
12551 return NULL_TREE;
12553 case TRUTH_ORIF_EXPR:
12554 /* Note that the operands of this must be ints
12555 and their values must be 0 or true.
12556 ("true" is a fixed value perhaps depending on the language.) */
12557 /* If first arg is constant true, return it. */
12558 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12559 return fold_convert_loc (loc, type, arg0);
12560 case TRUTH_OR_EXPR:
12561 /* If either arg is constant zero, drop it. */
12562 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12563 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12564 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12565 /* Preserve sequence points. */
12566 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12567 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12568 /* If second arg is constant true, result is true, but we must
12569 evaluate first arg. */
12570 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12571 return omit_one_operand_loc (loc, type, arg1, arg0);
12572 /* Likewise for first arg, but note this only occurs here for
12573 TRUTH_OR_EXPR. */
12574 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12575 return omit_one_operand_loc (loc, type, arg0, arg1);
12577 /* !X || X is always true. */
12578 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12579 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12580 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12581 /* X || !X is always true. */
12582 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12583 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12584 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12586 goto truth_andor;
12588 case TRUTH_XOR_EXPR:
12589 /* If the second arg is constant zero, drop it. */
12590 if (integer_zerop (arg1))
12591 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12592 /* If the second arg is constant true, this is a logical inversion. */
12593 if (integer_onep (arg1))
12595 /* Only call invert_truthvalue if operand is a truth value. */
12596 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12597 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12598 else
12599 tem = invert_truthvalue_loc (loc, arg0);
12600 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12602 /* Identical arguments cancel to zero. */
12603 if (operand_equal_p (arg0, arg1, 0))
12604 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12606 /* !X ^ X is always true. */
12607 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12608 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12609 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12611 /* X ^ !X is always true. */
12612 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12613 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12614 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12616 return NULL_TREE;
12618 case EQ_EXPR:
12619 case NE_EXPR:
12620 tem = fold_comparison (loc, code, type, op0, op1);
12621 if (tem != NULL_TREE)
12622 return tem;
12624 /* bool_var != 0 becomes bool_var. */
12625 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12626 && code == NE_EXPR)
12627 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12629 /* bool_var == 1 becomes bool_var. */
12630 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12631 && code == EQ_EXPR)
12632 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12634 /* bool_var != 1 becomes !bool_var. */
12635 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12636 && code == NE_EXPR)
12637 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12638 fold_convert_loc (loc, type, arg0));
12640 /* bool_var == 0 becomes !bool_var. */
12641 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12642 && code == EQ_EXPR)
12643 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12644 fold_convert_loc (loc, type, arg0));
12646 /* !exp != 0 becomes !exp */
12647 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12648 && code == NE_EXPR)
12649 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12651 /* If this is an equality comparison of the address of two non-weak,
12652 unaliased symbols neither of which are extern (since we do not
12653 have access to attributes for externs), then we know the result. */
12654 if (TREE_CODE (arg0) == ADDR_EXPR
12655 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12656 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12657 && ! lookup_attribute ("alias",
12658 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12659 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12660 && TREE_CODE (arg1) == ADDR_EXPR
12661 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12662 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12663 && ! lookup_attribute ("alias",
12664 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12665 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12667 /* We know that we're looking at the address of two
12668 non-weak, unaliased, static _DECL nodes.
12670 It is both wasteful and incorrect to call operand_equal_p
12671 to compare the two ADDR_EXPR nodes. It is wasteful in that
12672 all we need to do is test pointer equality for the arguments
12673 to the two ADDR_EXPR nodes. It is incorrect to use
12674 operand_equal_p as that function is NOT equivalent to a
12675 C equality test. It can in fact return false for two
12676 objects which would test as equal using the C equality
12677 operator. */
12678 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12679 return constant_boolean_node (equal
12680 ? code == EQ_EXPR : code != EQ_EXPR,
12681 type);
12684 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12685 a MINUS_EXPR of a constant, we can convert it into a comparison with
12686 a revised constant as long as no overflow occurs. */
12687 if (TREE_CODE (arg1) == INTEGER_CST
12688 && (TREE_CODE (arg0) == PLUS_EXPR
12689 || TREE_CODE (arg0) == MINUS_EXPR)
12690 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12691 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12692 ? MINUS_EXPR : PLUS_EXPR,
12693 fold_convert_loc (loc, TREE_TYPE (arg0),
12694 arg1),
12695 TREE_OPERAND (arg0, 1), 0))
12696 && !TREE_OVERFLOW (tem))
12697 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12699 /* Similarly for a NEGATE_EXPR. */
12700 if (TREE_CODE (arg0) == NEGATE_EXPR
12701 && TREE_CODE (arg1) == INTEGER_CST
12702 && 0 != (tem = negate_expr (arg1))
12703 && TREE_CODE (tem) == INTEGER_CST
12704 && !TREE_OVERFLOW (tem))
12705 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12707 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12708 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12709 && TREE_CODE (arg1) == INTEGER_CST
12710 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12711 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12712 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12713 fold_convert_loc (loc,
12714 TREE_TYPE (arg0),
12715 arg1),
12716 TREE_OPERAND (arg0, 1)));
12718 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12719 if ((TREE_CODE (arg0) == PLUS_EXPR
12720 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12721 || TREE_CODE (arg0) == MINUS_EXPR)
12722 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12723 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12724 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12726 tree val = TREE_OPERAND (arg0, 1);
12727 return omit_two_operands_loc (loc, type,
12728 fold_build2_loc (loc, code, type,
12729 val,
12730 build_int_cst (TREE_TYPE (val),
12731 0)),
12732 TREE_OPERAND (arg0, 0), arg1);
12735 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12736 if (TREE_CODE (arg0) == MINUS_EXPR
12737 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12738 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12739 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12741 return omit_two_operands_loc (loc, type,
12742 code == NE_EXPR
12743 ? boolean_true_node : boolean_false_node,
12744 TREE_OPERAND (arg0, 1), arg1);
12747 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12748 for !=. Don't do this for ordered comparisons due to overflow. */
12749 if (TREE_CODE (arg0) == MINUS_EXPR
12750 && integer_zerop (arg1))
12751 return fold_build2_loc (loc, code, type,
12752 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12754 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12755 if (TREE_CODE (arg0) == ABS_EXPR
12756 && (integer_zerop (arg1) || real_zerop (arg1)))
12757 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12759 /* If this is an EQ or NE comparison with zero and ARG0 is
12760 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12761 two operations, but the latter can be done in one less insn
12762 on machines that have only two-operand insns or on which a
12763 constant cannot be the first operand. */
12764 if (TREE_CODE (arg0) == BIT_AND_EXPR
12765 && integer_zerop (arg1))
12767 tree arg00 = TREE_OPERAND (arg0, 0);
12768 tree arg01 = TREE_OPERAND (arg0, 1);
12769 if (TREE_CODE (arg00) == LSHIFT_EXPR
12770 && integer_onep (TREE_OPERAND (arg00, 0)))
12772 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12773 arg01, TREE_OPERAND (arg00, 1));
12774 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12775 build_int_cst (TREE_TYPE (arg0), 1));
12776 return fold_build2_loc (loc, code, type,
12777 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12778 arg1);
12780 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12781 && integer_onep (TREE_OPERAND (arg01, 0)))
12783 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12784 arg00, TREE_OPERAND (arg01, 1));
12785 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12786 build_int_cst (TREE_TYPE (arg0), 1));
12787 return fold_build2_loc (loc, code, type,
12788 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12789 arg1);
12793 /* If this is an NE or EQ comparison of zero against the result of a
12794 signed MOD operation whose second operand is a power of 2, make
12795 the MOD operation unsigned since it is simpler and equivalent. */
12796 if (integer_zerop (arg1)
12797 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12798 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12799 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12800 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12801 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12802 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12804 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12805 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12806 fold_convert_loc (loc, newtype,
12807 TREE_OPERAND (arg0, 0)),
12808 fold_convert_loc (loc, newtype,
12809 TREE_OPERAND (arg0, 1)));
12811 return fold_build2_loc (loc, code, type, newmod,
12812 fold_convert_loc (loc, newtype, arg1));
12815 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12816 C1 is a valid shift constant, and C2 is a power of two, i.e.
12817 a single bit. */
12818 if (TREE_CODE (arg0) == BIT_AND_EXPR
12819 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12820 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12821 == INTEGER_CST
12822 && integer_pow2p (TREE_OPERAND (arg0, 1))
12823 && integer_zerop (arg1))
12825 tree itype = TREE_TYPE (arg0);
12826 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12827 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12829 /* Check for a valid shift count. */
12830 if (TREE_INT_CST_HIGH (arg001) == 0
12831 && TREE_INT_CST_LOW (arg001) < prec)
12833 tree arg01 = TREE_OPERAND (arg0, 1);
12834 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12835 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12836 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12837 can be rewritten as (X & (C2 << C1)) != 0. */
12838 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12840 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12841 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12842 return fold_build2_loc (loc, code, type, tem, arg1);
12844 /* Otherwise, for signed (arithmetic) shifts,
12845 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12846 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12847 else if (!TYPE_UNSIGNED (itype))
12848 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12849 arg000, build_int_cst (itype, 0));
12850 /* Otherwise, of unsigned (logical) shifts,
12851 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12852 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12853 else
12854 return omit_one_operand_loc (loc, type,
12855 code == EQ_EXPR ? integer_one_node
12856 : integer_zero_node,
12857 arg000);
12861 /* If this is an NE comparison of zero with an AND of one, remove the
12862 comparison since the AND will give the correct value. */
12863 if (code == NE_EXPR
12864 && integer_zerop (arg1)
12865 && TREE_CODE (arg0) == BIT_AND_EXPR
12866 && integer_onep (TREE_OPERAND (arg0, 1)))
12867 return fold_convert_loc (loc, type, arg0);
12869 /* If we have (A & C) == C where C is a power of 2, convert this into
12870 (A & C) != 0. Similarly for NE_EXPR. */
12871 if (TREE_CODE (arg0) == BIT_AND_EXPR
12872 && integer_pow2p (TREE_OPERAND (arg0, 1))
12873 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12874 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12875 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12876 integer_zero_node));
12878 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12879 bit, then fold the expression into A < 0 or A >= 0. */
12880 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12881 if (tem)
12882 return tem;
12884 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12885 Similarly for NE_EXPR. */
12886 if (TREE_CODE (arg0) == BIT_AND_EXPR
12887 && TREE_CODE (arg1) == INTEGER_CST
12888 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12890 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12891 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12892 TREE_OPERAND (arg0, 1));
12893 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12894 arg1, notc);
12895 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12896 if (integer_nonzerop (dandnotc))
12897 return omit_one_operand_loc (loc, type, rslt, arg0);
12900 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12901 Similarly for NE_EXPR. */
12902 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12903 && TREE_CODE (arg1) == INTEGER_CST
12904 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12906 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12907 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12908 TREE_OPERAND (arg0, 1), notd);
12909 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12910 if (integer_nonzerop (candnotd))
12911 return omit_one_operand_loc (loc, type, rslt, arg0);
12914 /* If this is a comparison of a field, we may be able to simplify it. */
12915 if ((TREE_CODE (arg0) == COMPONENT_REF
12916 || TREE_CODE (arg0) == BIT_FIELD_REF)
12917 /* Handle the constant case even without -O
12918 to make sure the warnings are given. */
12919 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12921 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12922 if (t1)
12923 return t1;
12926 /* Optimize comparisons of strlen vs zero to a compare of the
12927 first character of the string vs zero. To wit,
12928 strlen(ptr) == 0 => *ptr == 0
12929 strlen(ptr) != 0 => *ptr != 0
12930 Other cases should reduce to one of these two (or a constant)
12931 due to the return value of strlen being unsigned. */
12932 if (TREE_CODE (arg0) == CALL_EXPR
12933 && integer_zerop (arg1))
12935 tree fndecl = get_callee_fndecl (arg0);
12937 if (fndecl
12938 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12939 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12940 && call_expr_nargs (arg0) == 1
12941 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12943 tree iref = build_fold_indirect_ref_loc (loc,
12944 CALL_EXPR_ARG (arg0, 0));
12945 return fold_build2_loc (loc, code, type, iref,
12946 build_int_cst (TREE_TYPE (iref), 0));
12950 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12951 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12952 if (TREE_CODE (arg0) == RSHIFT_EXPR
12953 && integer_zerop (arg1)
12954 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12956 tree arg00 = TREE_OPERAND (arg0, 0);
12957 tree arg01 = TREE_OPERAND (arg0, 1);
12958 tree itype = TREE_TYPE (arg00);
12959 if (TREE_INT_CST_HIGH (arg01) == 0
12960 && TREE_INT_CST_LOW (arg01)
12961 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12963 if (TYPE_UNSIGNED (itype))
12965 itype = signed_type_for (itype);
12966 arg00 = fold_convert_loc (loc, itype, arg00);
12968 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12969 type, arg00, build_int_cst (itype, 0));
12973 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12974 if (integer_zerop (arg1)
12975 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12976 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12977 TREE_OPERAND (arg0, 1));
12979 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12980 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12981 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12982 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12983 build_int_cst (TREE_TYPE (arg1), 0));
12984 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12985 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12986 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12987 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12988 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12989 build_int_cst (TREE_TYPE (arg1), 0));
12991 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12992 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12993 && TREE_CODE (arg1) == INTEGER_CST
12994 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12995 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12996 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12997 TREE_OPERAND (arg0, 1), arg1));
12999 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13000 (X & C) == 0 when C is a single bit. */
13001 if (TREE_CODE (arg0) == BIT_AND_EXPR
13002 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13003 && integer_zerop (arg1)
13004 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13006 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13007 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13008 TREE_OPERAND (arg0, 1));
13009 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13010 type, tem, arg1);
13013 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13014 constant C is a power of two, i.e. a single bit. */
13015 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13016 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13017 && integer_zerop (arg1)
13018 && integer_pow2p (TREE_OPERAND (arg0, 1))
13019 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13020 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13022 tree arg00 = TREE_OPERAND (arg0, 0);
13023 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13024 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13027 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13028 when is C is a power of two, i.e. a single bit. */
13029 if (TREE_CODE (arg0) == BIT_AND_EXPR
13030 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13031 && integer_zerop (arg1)
13032 && integer_pow2p (TREE_OPERAND (arg0, 1))
13033 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13034 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13036 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13037 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13038 arg000, TREE_OPERAND (arg0, 1));
13039 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13040 tem, build_int_cst (TREE_TYPE (tem), 0));
13043 if (integer_zerop (arg1)
13044 && tree_expr_nonzero_p (arg0))
13046 tree res = constant_boolean_node (code==NE_EXPR, type);
13047 return omit_one_operand_loc (loc, type, res, arg0);
13050 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13051 if (TREE_CODE (arg0) == NEGATE_EXPR
13052 && TREE_CODE (arg1) == NEGATE_EXPR)
13053 return fold_build2_loc (loc, code, type,
13054 TREE_OPERAND (arg0, 0),
13055 TREE_OPERAND (arg1, 0));
13057 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13058 if (TREE_CODE (arg0) == BIT_AND_EXPR
13059 && TREE_CODE (arg1) == BIT_AND_EXPR)
13061 tree arg00 = TREE_OPERAND (arg0, 0);
13062 tree arg01 = TREE_OPERAND (arg0, 1);
13063 tree arg10 = TREE_OPERAND (arg1, 0);
13064 tree arg11 = TREE_OPERAND (arg1, 1);
13065 tree itype = TREE_TYPE (arg0);
13067 if (operand_equal_p (arg01, arg11, 0))
13068 return fold_build2_loc (loc, code, type,
13069 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13070 fold_build2_loc (loc,
13071 BIT_XOR_EXPR, itype,
13072 arg00, arg10),
13073 arg01),
13074 build_int_cst (itype, 0));
13076 if (operand_equal_p (arg01, arg10, 0))
13077 return fold_build2_loc (loc, code, type,
13078 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13079 fold_build2_loc (loc,
13080 BIT_XOR_EXPR, itype,
13081 arg00, arg11),
13082 arg01),
13083 build_int_cst (itype, 0));
13085 if (operand_equal_p (arg00, arg11, 0))
13086 return fold_build2_loc (loc, code, type,
13087 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13088 fold_build2_loc (loc,
13089 BIT_XOR_EXPR, itype,
13090 arg01, arg10),
13091 arg00),
13092 build_int_cst (itype, 0));
13094 if (operand_equal_p (arg00, arg10, 0))
13095 return fold_build2_loc (loc, code, type,
13096 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13097 fold_build2_loc (loc,
13098 BIT_XOR_EXPR, itype,
13099 arg01, arg11),
13100 arg00),
13101 build_int_cst (itype, 0));
13104 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13105 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13107 tree arg00 = TREE_OPERAND (arg0, 0);
13108 tree arg01 = TREE_OPERAND (arg0, 1);
13109 tree arg10 = TREE_OPERAND (arg1, 0);
13110 tree arg11 = TREE_OPERAND (arg1, 1);
13111 tree itype = TREE_TYPE (arg0);
13113 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13114 operand_equal_p guarantees no side-effects so we don't need
13115 to use omit_one_operand on Z. */
13116 if (operand_equal_p (arg01, arg11, 0))
13117 return fold_build2_loc (loc, code, type, arg00, arg10);
13118 if (operand_equal_p (arg01, arg10, 0))
13119 return fold_build2_loc (loc, code, type, arg00, arg11);
13120 if (operand_equal_p (arg00, arg11, 0))
13121 return fold_build2_loc (loc, code, type, arg01, arg10);
13122 if (operand_equal_p (arg00, arg10, 0))
13123 return fold_build2_loc (loc, code, type, arg01, arg11);
13125 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13126 if (TREE_CODE (arg01) == INTEGER_CST
13127 && TREE_CODE (arg11) == INTEGER_CST)
13128 return fold_build2_loc (loc, code, type,
13129 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13130 fold_build2_loc (loc,
13131 BIT_XOR_EXPR, itype,
13132 arg01, arg11)),
13133 arg10);
13136 /* Attempt to simplify equality/inequality comparisons of complex
13137 values. Only lower the comparison if the result is known or
13138 can be simplified to a single scalar comparison. */
13139 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13140 || TREE_CODE (arg0) == COMPLEX_CST)
13141 && (TREE_CODE (arg1) == COMPLEX_EXPR
13142 || TREE_CODE (arg1) == COMPLEX_CST))
13144 tree real0, imag0, real1, imag1;
13145 tree rcond, icond;
13147 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13149 real0 = TREE_OPERAND (arg0, 0);
13150 imag0 = TREE_OPERAND (arg0, 1);
13152 else
13154 real0 = TREE_REALPART (arg0);
13155 imag0 = TREE_IMAGPART (arg0);
13158 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13160 real1 = TREE_OPERAND (arg1, 0);
13161 imag1 = TREE_OPERAND (arg1, 1);
13163 else
13165 real1 = TREE_REALPART (arg1);
13166 imag1 = TREE_IMAGPART (arg1);
13169 rcond = fold_binary_loc (loc, code, type, real0, real1);
13170 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13172 if (integer_zerop (rcond))
13174 if (code == EQ_EXPR)
13175 return omit_two_operands_loc (loc, type, boolean_false_node,
13176 imag0, imag1);
13177 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13179 else
13181 if (code == NE_EXPR)
13182 return omit_two_operands_loc (loc, type, boolean_true_node,
13183 imag0, imag1);
13184 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13188 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13189 if (icond && TREE_CODE (icond) == INTEGER_CST)
13191 if (integer_zerop (icond))
13193 if (code == EQ_EXPR)
13194 return omit_two_operands_loc (loc, type, boolean_false_node,
13195 real0, real1);
13196 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13198 else
13200 if (code == NE_EXPR)
13201 return omit_two_operands_loc (loc, type, boolean_true_node,
13202 real0, real1);
13203 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13208 return NULL_TREE;
13210 case LT_EXPR:
13211 case GT_EXPR:
13212 case LE_EXPR:
13213 case GE_EXPR:
13214 tem = fold_comparison (loc, code, type, op0, op1);
13215 if (tem != NULL_TREE)
13216 return tem;
13218 /* Transform comparisons of the form X +- C CMP X. */
13219 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13220 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13221 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13222 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13223 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13224 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13226 tree arg01 = TREE_OPERAND (arg0, 1);
13227 enum tree_code code0 = TREE_CODE (arg0);
13228 int is_positive;
13230 if (TREE_CODE (arg01) == REAL_CST)
13231 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13232 else
13233 is_positive = tree_int_cst_sgn (arg01);
13235 /* (X - c) > X becomes false. */
13236 if (code == GT_EXPR
13237 && ((code0 == MINUS_EXPR && is_positive >= 0)
13238 || (code0 == PLUS_EXPR && is_positive <= 0)))
13240 if (TREE_CODE (arg01) == INTEGER_CST
13241 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13242 fold_overflow_warning (("assuming signed overflow does not "
13243 "occur when assuming that (X - c) > X "
13244 "is always false"),
13245 WARN_STRICT_OVERFLOW_ALL);
13246 return constant_boolean_node (0, type);
13249 /* Likewise (X + c) < X becomes false. */
13250 if (code == LT_EXPR
13251 && ((code0 == PLUS_EXPR && is_positive >= 0)
13252 || (code0 == MINUS_EXPR && is_positive <= 0)))
13254 if (TREE_CODE (arg01) == INTEGER_CST
13255 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13256 fold_overflow_warning (("assuming signed overflow does not "
13257 "occur when assuming that "
13258 "(X + c) < X is always false"),
13259 WARN_STRICT_OVERFLOW_ALL);
13260 return constant_boolean_node (0, type);
13263 /* Convert (X - c) <= X to true. */
13264 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13265 && code == LE_EXPR
13266 && ((code0 == MINUS_EXPR && is_positive >= 0)
13267 || (code0 == PLUS_EXPR && is_positive <= 0)))
13269 if (TREE_CODE (arg01) == INTEGER_CST
13270 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13271 fold_overflow_warning (("assuming signed overflow does not "
13272 "occur when assuming that "
13273 "(X - c) <= X is always true"),
13274 WARN_STRICT_OVERFLOW_ALL);
13275 return constant_boolean_node (1, type);
13278 /* Convert (X + c) >= X to true. */
13279 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13280 && code == GE_EXPR
13281 && ((code0 == PLUS_EXPR && is_positive >= 0)
13282 || (code0 == MINUS_EXPR && is_positive <= 0)))
13284 if (TREE_CODE (arg01) == INTEGER_CST
13285 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13286 fold_overflow_warning (("assuming signed overflow does not "
13287 "occur when assuming that "
13288 "(X + c) >= X is always true"),
13289 WARN_STRICT_OVERFLOW_ALL);
13290 return constant_boolean_node (1, type);
13293 if (TREE_CODE (arg01) == INTEGER_CST)
13295 /* Convert X + c > X and X - c < X to true for integers. */
13296 if (code == GT_EXPR
13297 && ((code0 == PLUS_EXPR && is_positive > 0)
13298 || (code0 == MINUS_EXPR && is_positive < 0)))
13300 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13301 fold_overflow_warning (("assuming signed overflow does "
13302 "not occur when assuming that "
13303 "(X + c) > X is always true"),
13304 WARN_STRICT_OVERFLOW_ALL);
13305 return constant_boolean_node (1, type);
13308 if (code == LT_EXPR
13309 && ((code0 == MINUS_EXPR && is_positive > 0)
13310 || (code0 == PLUS_EXPR && is_positive < 0)))
13312 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13313 fold_overflow_warning (("assuming signed overflow does "
13314 "not occur when assuming that "
13315 "(X - c) < X is always true"),
13316 WARN_STRICT_OVERFLOW_ALL);
13317 return constant_boolean_node (1, type);
13320 /* Convert X + c <= X and X - c >= X to false for integers. */
13321 if (code == LE_EXPR
13322 && ((code0 == PLUS_EXPR && is_positive > 0)
13323 || (code0 == MINUS_EXPR && is_positive < 0)))
13325 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13326 fold_overflow_warning (("assuming signed overflow does "
13327 "not occur when assuming that "
13328 "(X + c) <= X is always false"),
13329 WARN_STRICT_OVERFLOW_ALL);
13330 return constant_boolean_node (0, type);
13333 if (code == GE_EXPR
13334 && ((code0 == MINUS_EXPR && is_positive > 0)
13335 || (code0 == PLUS_EXPR && is_positive < 0)))
13337 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13338 fold_overflow_warning (("assuming signed overflow does "
13339 "not occur when assuming that "
13340 "(X - c) >= X is always false"),
13341 WARN_STRICT_OVERFLOW_ALL);
13342 return constant_boolean_node (0, type);
13347 /* Comparisons with the highest or lowest possible integer of
13348 the specified precision will have known values. */
13350 tree arg1_type = TREE_TYPE (arg1);
13351 unsigned int width = TYPE_PRECISION (arg1_type);
13353 if (TREE_CODE (arg1) == INTEGER_CST
13354 && width <= 2 * HOST_BITS_PER_WIDE_INT
13355 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13357 HOST_WIDE_INT signed_max_hi;
13358 unsigned HOST_WIDE_INT signed_max_lo;
13359 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13361 if (width <= HOST_BITS_PER_WIDE_INT)
13363 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13364 - 1;
13365 signed_max_hi = 0;
13366 max_hi = 0;
13368 if (TYPE_UNSIGNED (arg1_type))
13370 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13371 min_lo = 0;
13372 min_hi = 0;
13374 else
13376 max_lo = signed_max_lo;
13377 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13378 min_hi = -1;
13381 else
13383 width -= HOST_BITS_PER_WIDE_INT;
13384 signed_max_lo = -1;
13385 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13386 - 1;
13387 max_lo = -1;
13388 min_lo = 0;
13390 if (TYPE_UNSIGNED (arg1_type))
13392 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13393 min_hi = 0;
13395 else
13397 max_hi = signed_max_hi;
13398 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13402 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13403 && TREE_INT_CST_LOW (arg1) == max_lo)
13404 switch (code)
13406 case GT_EXPR:
13407 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13409 case GE_EXPR:
13410 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13412 case LE_EXPR:
13413 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13415 case LT_EXPR:
13416 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13418 /* The GE_EXPR and LT_EXPR cases above are not normally
13419 reached because of previous transformations. */
13421 default:
13422 break;
13424 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13425 == max_hi
13426 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13427 switch (code)
13429 case GT_EXPR:
13430 arg1 = const_binop (PLUS_EXPR, arg1,
13431 build_int_cst (TREE_TYPE (arg1), 1), 0);
13432 return fold_build2_loc (loc, EQ_EXPR, type,
13433 fold_convert_loc (loc,
13434 TREE_TYPE (arg1), arg0),
13435 arg1);
13436 case LE_EXPR:
13437 arg1 = const_binop (PLUS_EXPR, arg1,
13438 build_int_cst (TREE_TYPE (arg1), 1), 0);
13439 return fold_build2_loc (loc, NE_EXPR, type,
13440 fold_convert_loc (loc, TREE_TYPE (arg1),
13441 arg0),
13442 arg1);
13443 default:
13444 break;
13446 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13447 == min_hi
13448 && TREE_INT_CST_LOW (arg1) == min_lo)
13449 switch (code)
13451 case LT_EXPR:
13452 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13454 case LE_EXPR:
13455 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13457 case GE_EXPR:
13458 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13460 case GT_EXPR:
13461 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13463 default:
13464 break;
13466 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13467 == min_hi
13468 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13469 switch (code)
13471 case GE_EXPR:
13472 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13473 return fold_build2_loc (loc, NE_EXPR, type,
13474 fold_convert_loc (loc,
13475 TREE_TYPE (arg1), arg0),
13476 arg1);
13477 case LT_EXPR:
13478 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13479 return fold_build2_loc (loc, EQ_EXPR, type,
13480 fold_convert_loc (loc, TREE_TYPE (arg1),
13481 arg0),
13482 arg1);
13483 default:
13484 break;
13487 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13488 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13489 && TYPE_UNSIGNED (arg1_type)
13490 /* We will flip the signedness of the comparison operator
13491 associated with the mode of arg1, so the sign bit is
13492 specified by this mode. Check that arg1 is the signed
13493 max associated with this sign bit. */
13494 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13495 /* signed_type does not work on pointer types. */
13496 && INTEGRAL_TYPE_P (arg1_type))
13498 /* The following case also applies to X < signed_max+1
13499 and X >= signed_max+1 because previous transformations. */
13500 if (code == LE_EXPR || code == GT_EXPR)
13502 tree st;
13503 st = signed_type_for (TREE_TYPE (arg1));
13504 return fold_build2_loc (loc,
13505 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13506 type, fold_convert_loc (loc, st, arg0),
13507 build_int_cst (st, 0));
13513 /* If we are comparing an ABS_EXPR with a constant, we can
13514 convert all the cases into explicit comparisons, but they may
13515 well not be faster than doing the ABS and one comparison.
13516 But ABS (X) <= C is a range comparison, which becomes a subtraction
13517 and a comparison, and is probably faster. */
13518 if (code == LE_EXPR
13519 && TREE_CODE (arg1) == INTEGER_CST
13520 && TREE_CODE (arg0) == ABS_EXPR
13521 && ! TREE_SIDE_EFFECTS (arg0)
13522 && (0 != (tem = negate_expr (arg1)))
13523 && TREE_CODE (tem) == INTEGER_CST
13524 && !TREE_OVERFLOW (tem))
13525 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13526 build2 (GE_EXPR, type,
13527 TREE_OPERAND (arg0, 0), tem),
13528 build2 (LE_EXPR, type,
13529 TREE_OPERAND (arg0, 0), arg1));
13531 /* Convert ABS_EXPR<x> >= 0 to true. */
13532 strict_overflow_p = false;
13533 if (code == GE_EXPR
13534 && (integer_zerop (arg1)
13535 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13536 && real_zerop (arg1)))
13537 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13539 if (strict_overflow_p)
13540 fold_overflow_warning (("assuming signed overflow does not occur "
13541 "when simplifying comparison of "
13542 "absolute value and zero"),
13543 WARN_STRICT_OVERFLOW_CONDITIONAL);
13544 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13547 /* Convert ABS_EXPR<x> < 0 to false. */
13548 strict_overflow_p = false;
13549 if (code == LT_EXPR
13550 && (integer_zerop (arg1) || real_zerop (arg1))
13551 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13553 if (strict_overflow_p)
13554 fold_overflow_warning (("assuming signed overflow does not occur "
13555 "when simplifying comparison of "
13556 "absolute value and zero"),
13557 WARN_STRICT_OVERFLOW_CONDITIONAL);
13558 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13561 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13562 and similarly for >= into !=. */
13563 if ((code == LT_EXPR || code == GE_EXPR)
13564 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13565 && TREE_CODE (arg1) == LSHIFT_EXPR
13566 && integer_onep (TREE_OPERAND (arg1, 0)))
13568 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13569 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13570 TREE_OPERAND (arg1, 1)),
13571 build_int_cst (TREE_TYPE (arg0), 0));
13572 goto fold_binary_exit;
13575 if ((code == LT_EXPR || code == GE_EXPR)
13576 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13577 && CONVERT_EXPR_P (arg1)
13578 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13579 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13581 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13582 fold_convert_loc (loc, TREE_TYPE (arg0),
13583 build2 (RSHIFT_EXPR,
13584 TREE_TYPE (arg0), arg0,
13585 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13586 1))),
13587 build_int_cst (TREE_TYPE (arg0), 0));
13588 goto fold_binary_exit;
13591 return NULL_TREE;
13593 case UNORDERED_EXPR:
13594 case ORDERED_EXPR:
13595 case UNLT_EXPR:
13596 case UNLE_EXPR:
13597 case UNGT_EXPR:
13598 case UNGE_EXPR:
13599 case UNEQ_EXPR:
13600 case LTGT_EXPR:
13601 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13603 t1 = fold_relational_const (code, type, arg0, arg1);
13604 if (t1 != NULL_TREE)
13605 return t1;
13608 /* If the first operand is NaN, the result is constant. */
13609 if (TREE_CODE (arg0) == REAL_CST
13610 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13611 && (code != LTGT_EXPR || ! flag_trapping_math))
13613 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13614 ? integer_zero_node
13615 : integer_one_node;
13616 return omit_one_operand_loc (loc, type, t1, arg1);
13619 /* If the second operand is NaN, the result is constant. */
13620 if (TREE_CODE (arg1) == REAL_CST
13621 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13622 && (code != LTGT_EXPR || ! flag_trapping_math))
13624 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13625 ? integer_zero_node
13626 : integer_one_node;
13627 return omit_one_operand_loc (loc, type, t1, arg0);
13630 /* Simplify unordered comparison of something with itself. */
13631 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13632 && operand_equal_p (arg0, arg1, 0))
13633 return constant_boolean_node (1, type);
13635 if (code == LTGT_EXPR
13636 && !flag_trapping_math
13637 && operand_equal_p (arg0, arg1, 0))
13638 return constant_boolean_node (0, type);
13640 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13642 tree targ0 = strip_float_extensions (arg0);
13643 tree targ1 = strip_float_extensions (arg1);
13644 tree newtype = TREE_TYPE (targ0);
13646 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13647 newtype = TREE_TYPE (targ1);
13649 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13650 return fold_build2_loc (loc, code, type,
13651 fold_convert_loc (loc, newtype, targ0),
13652 fold_convert_loc (loc, newtype, targ1));
13655 return NULL_TREE;
13657 case COMPOUND_EXPR:
13658 /* When pedantic, a compound expression can be neither an lvalue
13659 nor an integer constant expression. */
13660 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13661 return NULL_TREE;
13662 /* Don't let (0, 0) be null pointer constant. */
13663 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13664 : fold_convert_loc (loc, type, arg1);
13665 return pedantic_non_lvalue_loc (loc, tem);
13667 case COMPLEX_EXPR:
13668 if ((TREE_CODE (arg0) == REAL_CST
13669 && TREE_CODE (arg1) == REAL_CST)
13670 || (TREE_CODE (arg0) == INTEGER_CST
13671 && TREE_CODE (arg1) == INTEGER_CST))
13672 return build_complex (type, arg0, arg1);
13673 return NULL_TREE;
13675 case ASSERT_EXPR:
13676 /* An ASSERT_EXPR should never be passed to fold_binary. */
13677 gcc_unreachable ();
13679 default:
13680 return NULL_TREE;
13681 } /* switch (code) */
13682 fold_binary_exit:
13683 protected_set_expr_location (tem, loc);
13684 return tem;
13687 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13688 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13689 of GOTO_EXPR. */
13691 static tree
13692 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13694 switch (TREE_CODE (*tp))
13696 case LABEL_EXPR:
13697 return *tp;
13699 case GOTO_EXPR:
13700 *walk_subtrees = 0;
13702 /* ... fall through ... */
13704 default:
13705 return NULL_TREE;
13709 /* Return whether the sub-tree ST contains a label which is accessible from
13710 outside the sub-tree. */
13712 static bool
13713 contains_label_p (tree st)
13715 return
13716 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13719 /* Fold a ternary expression of code CODE and type TYPE with operands
13720 OP0, OP1, and OP2. Return the folded expression if folding is
13721 successful. Otherwise, return NULL_TREE. */
13723 tree
13724 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13725 tree op0, tree op1, tree op2)
13727 tree tem;
13728 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13729 enum tree_code_class kind = TREE_CODE_CLASS (code);
13731 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13732 && TREE_CODE_LENGTH (code) == 3);
13734 /* Strip any conversions that don't change the mode. This is safe
13735 for every expression, except for a comparison expression because
13736 its signedness is derived from its operands. So, in the latter
13737 case, only strip conversions that don't change the signedness.
13739 Note that this is done as an internal manipulation within the
13740 constant folder, in order to find the simplest representation of
13741 the arguments so that their form can be studied. In any case,
13742 the appropriate type conversions should be put back in the tree
13743 that will get out of the constant folder. */
13744 if (op0)
13746 arg0 = op0;
13747 STRIP_NOPS (arg0);
13750 if (op1)
13752 arg1 = op1;
13753 STRIP_NOPS (arg1);
13756 switch (code)
13758 case COMPONENT_REF:
13759 if (TREE_CODE (arg0) == CONSTRUCTOR
13760 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13762 unsigned HOST_WIDE_INT idx;
13763 tree field, value;
13764 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13765 if (field == arg1)
13766 return value;
13768 return NULL_TREE;
13770 case COND_EXPR:
13771 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13772 so all simple results must be passed through pedantic_non_lvalue. */
13773 if (TREE_CODE (arg0) == INTEGER_CST)
13775 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13776 tem = integer_zerop (arg0) ? op2 : op1;
13777 /* Only optimize constant conditions when the selected branch
13778 has the same type as the COND_EXPR. This avoids optimizing
13779 away "c ? x : throw", where the throw has a void type.
13780 Also avoid throwing away an operand that contains a label. */
13781 if ((!TREE_SIDE_EFFECTS (unused_op)
13782 || !contains_label_p (unused_op))
13783 && (! VOID_TYPE_P (TREE_TYPE (tem))
13784 || VOID_TYPE_P (type)))
13785 return pedantic_non_lvalue_loc (loc, tem);
13786 return NULL_TREE;
13788 if (operand_equal_p (arg1, op2, 0))
13789 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13791 /* If we have A op B ? A : C, we may be able to convert this to a
13792 simpler expression, depending on the operation and the values
13793 of B and C. Signed zeros prevent all of these transformations,
13794 for reasons given above each one.
13796 Also try swapping the arguments and inverting the conditional. */
13797 if (COMPARISON_CLASS_P (arg0)
13798 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13799 arg1, TREE_OPERAND (arg0, 1))
13800 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13802 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13803 if (tem)
13804 return tem;
13807 if (COMPARISON_CLASS_P (arg0)
13808 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13809 op2,
13810 TREE_OPERAND (arg0, 1))
13811 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13813 tem = fold_truth_not_expr (loc, arg0);
13814 if (tem && COMPARISON_CLASS_P (tem))
13816 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13817 if (tem)
13818 return tem;
13822 /* If the second operand is simpler than the third, swap them
13823 since that produces better jump optimization results. */
13824 if (truth_value_p (TREE_CODE (arg0))
13825 && tree_swap_operands_p (op1, op2, false))
13827 /* See if this can be inverted. If it can't, possibly because
13828 it was a floating-point inequality comparison, don't do
13829 anything. */
13830 tem = fold_truth_not_expr (loc, arg0);
13831 if (tem)
13832 return fold_build3_loc (loc, code, type, tem, op2, op1);
13835 /* Convert A ? 1 : 0 to simply A. */
13836 if (integer_onep (op1)
13837 && integer_zerop (op2)
13838 /* If we try to convert OP0 to our type, the
13839 call to fold will try to move the conversion inside
13840 a COND, which will recurse. In that case, the COND_EXPR
13841 is probably the best choice, so leave it alone. */
13842 && type == TREE_TYPE (arg0))
13843 return pedantic_non_lvalue_loc (loc, arg0);
13845 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13846 over COND_EXPR in cases such as floating point comparisons. */
13847 if (integer_zerop (op1)
13848 && integer_onep (op2)
13849 && truth_value_p (TREE_CODE (arg0)))
13850 return pedantic_non_lvalue_loc (loc,
13851 fold_convert_loc (loc, type,
13852 invert_truthvalue_loc (loc,
13853 arg0)));
13855 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13856 if (TREE_CODE (arg0) == LT_EXPR
13857 && integer_zerop (TREE_OPERAND (arg0, 1))
13858 && integer_zerop (op2)
13859 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13861 /* sign_bit_p only checks ARG1 bits within A's precision.
13862 If <sign bit of A> has wider type than A, bits outside
13863 of A's precision in <sign bit of A> need to be checked.
13864 If they are all 0, this optimization needs to be done
13865 in unsigned A's type, if they are all 1 in signed A's type,
13866 otherwise this can't be done. */
13867 if (TYPE_PRECISION (TREE_TYPE (tem))
13868 < TYPE_PRECISION (TREE_TYPE (arg1))
13869 && TYPE_PRECISION (TREE_TYPE (tem))
13870 < TYPE_PRECISION (type))
13872 unsigned HOST_WIDE_INT mask_lo;
13873 HOST_WIDE_INT mask_hi;
13874 int inner_width, outer_width;
13875 tree tem_type;
13877 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13878 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13879 if (outer_width > TYPE_PRECISION (type))
13880 outer_width = TYPE_PRECISION (type);
13882 if (outer_width > HOST_BITS_PER_WIDE_INT)
13884 mask_hi = ((unsigned HOST_WIDE_INT) -1
13885 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13886 mask_lo = -1;
13888 else
13890 mask_hi = 0;
13891 mask_lo = ((unsigned HOST_WIDE_INT) -1
13892 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13894 if (inner_width > HOST_BITS_PER_WIDE_INT)
13896 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13897 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13898 mask_lo = 0;
13900 else
13901 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13902 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13904 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13905 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13907 tem_type = signed_type_for (TREE_TYPE (tem));
13908 tem = fold_convert_loc (loc, tem_type, tem);
13910 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13911 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13913 tem_type = unsigned_type_for (TREE_TYPE (tem));
13914 tem = fold_convert_loc (loc, tem_type, tem);
13916 else
13917 tem = NULL;
13920 if (tem)
13921 return
13922 fold_convert_loc (loc, type,
13923 fold_build2_loc (loc, BIT_AND_EXPR,
13924 TREE_TYPE (tem), tem,
13925 fold_convert_loc (loc,
13926 TREE_TYPE (tem),
13927 arg1)));
13930 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13931 already handled above. */
13932 if (TREE_CODE (arg0) == BIT_AND_EXPR
13933 && integer_onep (TREE_OPERAND (arg0, 1))
13934 && integer_zerop (op2)
13935 && integer_pow2p (arg1))
13937 tree tem = TREE_OPERAND (arg0, 0);
13938 STRIP_NOPS (tem);
13939 if (TREE_CODE (tem) == RSHIFT_EXPR
13940 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13941 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13942 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13943 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13944 TREE_OPERAND (tem, 0), arg1);
13947 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13948 is probably obsolete because the first operand should be a
13949 truth value (that's why we have the two cases above), but let's
13950 leave it in until we can confirm this for all front-ends. */
13951 if (integer_zerop (op2)
13952 && TREE_CODE (arg0) == NE_EXPR
13953 && integer_zerop (TREE_OPERAND (arg0, 1))
13954 && integer_pow2p (arg1)
13955 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13956 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13957 arg1, OEP_ONLY_CONST))
13958 return pedantic_non_lvalue_loc (loc,
13959 fold_convert_loc (loc, type,
13960 TREE_OPERAND (arg0, 0)));
13962 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13963 if (integer_zerop (op2)
13964 && truth_value_p (TREE_CODE (arg0))
13965 && truth_value_p (TREE_CODE (arg1)))
13966 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13967 fold_convert_loc (loc, type, arg0),
13968 arg1);
13970 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13971 if (integer_onep (op2)
13972 && truth_value_p (TREE_CODE (arg0))
13973 && truth_value_p (TREE_CODE (arg1)))
13975 /* Only perform transformation if ARG0 is easily inverted. */
13976 tem = fold_truth_not_expr (loc, arg0);
13977 if (tem)
13978 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13979 fold_convert_loc (loc, type, tem),
13980 arg1);
13983 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13984 if (integer_zerop (arg1)
13985 && truth_value_p (TREE_CODE (arg0))
13986 && truth_value_p (TREE_CODE (op2)))
13988 /* Only perform transformation if ARG0 is easily inverted. */
13989 tem = fold_truth_not_expr (loc, arg0);
13990 if (tem)
13991 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13992 fold_convert_loc (loc, type, tem),
13993 op2);
13996 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13997 if (integer_onep (arg1)
13998 && truth_value_p (TREE_CODE (arg0))
13999 && truth_value_p (TREE_CODE (op2)))
14000 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14001 fold_convert_loc (loc, type, arg0),
14002 op2);
14004 return NULL_TREE;
14006 case CALL_EXPR:
14007 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14008 of fold_ternary on them. */
14009 gcc_unreachable ();
14011 case BIT_FIELD_REF:
14012 if ((TREE_CODE (arg0) == VECTOR_CST
14013 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
14014 && type == TREE_TYPE (TREE_TYPE (arg0)))
14016 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
14017 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
/* The assignment in the condition below narrows IDX from a bit
   offset into an element index.  */
14019 if (width != 0
14020 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
14021 && (idx % width) == 0
14022 && (idx = idx / width)
14023 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14025 tree elements = NULL_TREE;
14027 if (TREE_CODE (arg0) == VECTOR_CST)
14028 elements = TREE_VECTOR_CST_ELTS (arg0);
14029 else
14031 unsigned HOST_WIDE_INT idx;
14032 tree value;
14034 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
14035 elements = tree_cons (NULL_TREE, value, elements);
/* NOTE(review): tree_cons above prepends, so for the CONSTRUCTOR case
   ELEMENTS is in reverse element order -- confirm the forward walk
   below selects the intended element.  */
14037 while (idx-- > 0 && elements)
14038 elements = TREE_CHAIN (elements);
14039 if (elements)
14040 return TREE_VALUE (elements);
14041 else
14042 return fold_convert_loc (loc, type, integer_zero_node);
14046 /* A bit-field-ref that referenced the full argument can be stripped. */
14047 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14048 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14049 && integer_zerop (op2))
14050 return fold_convert_loc (loc, type, arg0);
14052 return NULL_TREE;
14054 default:
14055 return NULL_TREE;
14056 } /* switch (code) */
14059 /* Perform constant folding and related simplification of EXPR.
14060 The related simplifications include x*1 => x, x*0 => 0, etc.,
14061 and application of the associative law.
14062 NOP_EXPR conversions may be removed freely (as long as we
14063 are careful not to change the type of the overall expression).
14064 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14065 but we can constant-fold them if they have constant operands. */
14067 #ifdef ENABLE_FOLD_CHECKING
14068 # define fold(x) fold_1 (x)
14069 static tree fold_1 (tree);
14070 static
14071 #endif
14072 tree
14073 fold (tree expr)
14075 const tree t = expr;
14076 enum tree_code code = TREE_CODE (t);
14077 enum tree_code_class kind = TREE_CODE_CLASS (code);
14078 tree tem;
14079 location_t loc = EXPR_LOCATION (expr);
14081 /* Return right away if a constant. */
14082 if (kind == tcc_constant)
14083 return t;
14085 /* CALL_EXPR-like objects with variable numbers of operands are
14086 treated specially. */
14087 if (kind == tcc_vl_exp)
14089 if (code == CALL_EXPR)
14091 tem = fold_call_expr (loc, expr, false);
14092 return tem ? tem : expr;
14094 return expr;
14097 if (IS_EXPR_CODE_CLASS (kind))
14099 tree type = TREE_TYPE (t);
14100 tree op0, op1, op2;
14102 switch (TREE_CODE_LENGTH (code))
14104 case 1:
14105 op0 = TREE_OPERAND (t, 0);
14106 tem = fold_unary_loc (loc, code, type, op0);
14107 return tem ? tem : expr;
14108 case 2:
14109 op0 = TREE_OPERAND (t, 0);
14110 op1 = TREE_OPERAND (t, 1);
14111 tem = fold_binary_loc (loc, code, type, op0, op1);
14112 return tem ? tem : expr;
14113 case 3:
14114 op0 = TREE_OPERAND (t, 0);
14115 op1 = TREE_OPERAND (t, 1);
14116 op2 = TREE_OPERAND (t, 2);
14117 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14118 return tem ? tem : expr;
14119 default:
14120 break;
14124 switch (code)
14126 case ARRAY_REF:
14128 tree op0 = TREE_OPERAND (t, 0);
14129 tree op1 = TREE_OPERAND (t, 1);
14131 if (TREE_CODE (op1) == INTEGER_CST
14132 && TREE_CODE (op0) == CONSTRUCTOR
14133 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14135 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14136 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14137 unsigned HOST_WIDE_INT begin = 0;
14139 /* Find a matching index by means of a binary search. */
14140 while (begin != end)
14142 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14143 tree index = VEC_index (constructor_elt, elts, middle)->index;
14145 if (TREE_CODE (index) == INTEGER_CST
14146 && tree_int_cst_lt (index, op1))
14147 begin = middle + 1;
14148 else if (TREE_CODE (index) == INTEGER_CST
14149 && tree_int_cst_lt (op1, index))
14150 end = middle;
14151 else if (TREE_CODE (index) == RANGE_EXPR
14152 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14153 begin = middle + 1;
14154 else if (TREE_CODE (index) == RANGE_EXPR
14155 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14156 end = middle;
14157 else
14158 return VEC_index (constructor_elt, elts, middle)->value;
14162 return t;
14165 case CONST_DECL:
14166 return fold (DECL_INITIAL (t));
14168 default:
14169 return t;
14170 } /* switch (code) */
14173 #ifdef ENABLE_FOLD_CHECKING
14174 #undef fold
14176 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14177 static void fold_check_failed (const_tree, const_tree);
14178 void print_fold_checksum (const_tree);
14180 /* When --enable-checking=fold, compute a digest of expr before
14181 and after actual fold call to see if fold did not accidentally
14182 change original expr. */
14184 tree
14185 fold (tree expr)
14187 tree ret;
14188 struct md5_ctx ctx;
14189 unsigned char checksum_before[16], checksum_after[16];
14190 htab_t ht;
14192 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14193 md5_init_ctx (&ctx);
14194 fold_checksum_tree (expr, &ctx, ht);
14195 md5_finish_ctx (&ctx, checksum_before);
14196 htab_empty (ht);
14198 ret = fold_1 (expr);
14200 md5_init_ctx (&ctx);
14201 fold_checksum_tree (expr, &ctx, ht);
14202 md5_finish_ctx (&ctx, checksum_after);
14203 htab_delete (ht);
14205 if (memcmp (checksum_before, checksum_after, 16))
14206 fold_check_failed (expr, ret);
14208 return ret;
14211 void
14212 print_fold_checksum (const_tree expr)
14214 struct md5_ctx ctx;
14215 unsigned char checksum[16], cnt;
14216 htab_t ht;
14218 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14219 md5_init_ctx (&ctx);
14220 fold_checksum_tree (expr, &ctx, ht);
14221 md5_finish_ctx (&ctx, checksum);
14222 htab_delete (ht);
14223 for (cnt = 0; cnt < 16; ++cnt)
14224 fprintf (stderr, "%02x", checksum[cnt]);
14225 putc ('\n', stderr);
14228 static void
14229 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14231 internal_error ("fold check: original tree changed by fold");
/* Fold a checksum of the tree rooted at EXPR into CTX, using the hash
   table HT to visit every shared node only once.  Fields that fold is
   permitted to modify (DECL_ASSEMBLER_NAME, type pointer caches and
   variant chains) are scrubbed from a stack copy before hashing so
   they do not perturb the digest.  */
14234 static void
14235 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14237 const void **slot;
14238 enum tree_code code;
14239 union tree_node buf;
14240 int i, len;
/* Target of the TREE_LIST tail call below; keeps chain walks iterative.  */
14242 recursive_label:
/* BUF must be large enough to hold any node copied into it below.  */
14244 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14245 <= sizeof (struct tree_function_decl))
14246 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14247 if (expr == NULL)
14248 return;
14249 slot = (const void **) htab_find_slot (ht, expr, INSERT);
14250 if (*slot != NULL)
14251 return;
14252 *slot = expr;
14253 code = TREE_CODE (expr);
14254 if (TREE_CODE_CLASS (code) == tcc_declaration
14255 && DECL_ASSEMBLER_NAME_SET_P (expr))
14257 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14258 memcpy ((char *) &buf, expr, tree_size (expr));
14259 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14260 expr = (tree) &buf;
14262 else if (TREE_CODE_CLASS (code) == tcc_type
14263 && (TYPE_POINTER_TO (expr)
14264 || TYPE_REFERENCE_TO (expr)
14265 || TYPE_CACHED_VALUES_P (expr)
14266 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14267 || TYPE_NEXT_VARIANT (expr)))
14269 /* Allow these fields to be modified. */
14270 tree tmp;
14271 memcpy ((char *) &buf, expr, tree_size (expr));
14272 expr = tmp = (tree) &buf;
14273 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14274 TYPE_POINTER_TO (tmp) = NULL;
14275 TYPE_REFERENCE_TO (tmp) = NULL;
14276 TYPE_NEXT_VARIANT (tmp) = NULL;
14277 if (TYPE_CACHED_VALUES_P (tmp))
14279 TYPE_CACHED_VALUES_P (tmp) = 0;
14280 TYPE_CACHED_VALUES (tmp) = NULL;
/* Hash the (possibly scrubbed) node itself, then recurse into the
   pointed-to trees by code class.  */
14283 md5_process_bytes (expr, tree_size (expr), ctx);
14284 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14285 if (TREE_CODE_CLASS (code) != tcc_type
14286 && TREE_CODE_CLASS (code) != tcc_declaration
14287 && code != TREE_LIST
14288 && code != SSA_NAME)
14289 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14290 switch (TREE_CODE_CLASS (code))
14292 case tcc_constant:
14293 switch (code)
14295 case STRING_CST:
14296 md5_process_bytes (TREE_STRING_POINTER (expr),
14297 TREE_STRING_LENGTH (expr), ctx);
14298 break;
14299 case COMPLEX_CST:
14300 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14301 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14302 break;
14303 case VECTOR_CST:
14304 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14305 break;
14306 default:
14307 break;
14309 break;
14310 case tcc_exceptional:
14311 switch (code)
14313 case TREE_LIST:
14314 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14315 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14316 expr = TREE_CHAIN (expr);
14317 goto recursive_label;
14318 break;
14319 case TREE_VEC:
14320 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14321 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14322 break;
14323 default:
14324 break;
14326 break;
14327 case tcc_expression:
14328 case tcc_reference:
14329 case tcc_comparison:
14330 case tcc_unary:
14331 case tcc_binary:
14332 case tcc_statement:
14333 case tcc_vl_exp:
14334 len = TREE_OPERAND_LENGTH (expr);
14335 for (i = 0; i < len; ++i)
14336 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14337 break;
14338 case tcc_declaration:
14339 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14340 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14341 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14343 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14344 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14345 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14346 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14347 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14349 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14350 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14352 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14354 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14355 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14356 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14358 break;
14359 case tcc_type:
14360 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14361 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14362 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14363 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14364 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14365 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14366 if (INTEGRAL_TYPE_P (expr)
14367 || SCALAR_FLOAT_TYPE_P (expr))
14369 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14370 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14372 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14373 if (TREE_CODE (expr) == RECORD_TYPE
14374 || TREE_CODE (expr) == UNION_TYPE
14375 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14376 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14377 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14378 break;
14379 default:
14380 break;
14384 /* Helper function for outputting the checksum of a tree T. When
14385 debugging with gdb, you can "define mynext" to be "next" followed
14386 by "call debug_fold_checksum (op0)", then just trace down till the
14387 outputs differ. */
14389 void
14390 debug_fold_checksum (const_tree t)
14392 int i;
14393 unsigned char checksum[16];
14394 struct md5_ctx ctx;
14395 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14397 md5_init_ctx (&ctx);
14398 fold_checksum_tree (t, &ctx, ht);
14399 md5_finish_ctx (&ctx, checksum);
14400 htab_empty (ht);
14402 for (i = 0; i < 16; i++)
14403 fprintf (stderr, "%d ", checksum[i]);
14405 fprintf (stderr, "\n");
14408 #endif
14410 /* Fold a unary tree expression with code CODE of type TYPE with an
14411 operand OP0. LOC is the location of the resulting expression.
14412 Return a folded expression if successful. Otherwise, return a tree
14413 expression with code CODE of type TYPE with an operand OP0. */
14415 tree
14416 fold_build1_stat_loc (location_t loc,
14417 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14419 tree tem;
14420 #ifdef ENABLE_FOLD_CHECKING
14421 unsigned char checksum_before[16], checksum_after[16];
14422 struct md5_ctx ctx;
14423 htab_t ht;
14425 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14426 md5_init_ctx (&ctx);
14427 fold_checksum_tree (op0, &ctx, ht);
14428 md5_finish_ctx (&ctx, checksum_before);
14429 htab_empty (ht);
14430 #endif
14432 tem = fold_unary_loc (loc, code, type, op0);
14433 if (!tem)
14435 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14436 SET_EXPR_LOCATION (tem, loc);
14439 #ifdef ENABLE_FOLD_CHECKING
14440 md5_init_ctx (&ctx);
14441 fold_checksum_tree (op0, &ctx, ht);
14442 md5_finish_ctx (&ctx, checksum_after);
14443 htab_delete (ht);
14445 if (memcmp (checksum_before, checksum_after, 16))
14446 fold_check_failed (op0, tem);
14447 #endif
14448 return tem;
14451 /* Fold a binary tree expression with code CODE of type TYPE with
14452 operands OP0 and OP1. LOC is the location of the resulting
14453 expression. Return a folded expression if successful. Otherwise,
14454 return a tree expression with code CODE of type TYPE with operands
14455 OP0 and OP1. */
14457 tree
14458 fold_build2_stat_loc (location_t loc,
14459 enum tree_code code, tree type, tree op0, tree op1
14460 MEM_STAT_DECL)
14462 tree tem;
14463 #ifdef ENABLE_FOLD_CHECKING
14464 unsigned char checksum_before_op0[16],
14465 checksum_before_op1[16],
14466 checksum_after_op0[16],
14467 checksum_after_op1[16];
14468 struct md5_ctx ctx;
14469 htab_t ht;
14471 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14472 md5_init_ctx (&ctx);
14473 fold_checksum_tree (op0, &ctx, ht);
14474 md5_finish_ctx (&ctx, checksum_before_op0);
14475 htab_empty (ht);
14477 md5_init_ctx (&ctx);
14478 fold_checksum_tree (op1, &ctx, ht);
14479 md5_finish_ctx (&ctx, checksum_before_op1);
14480 htab_empty (ht);
14481 #endif
14483 tem = fold_binary_loc (loc, code, type, op0, op1);
14484 if (!tem)
14486 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14487 SET_EXPR_LOCATION (tem, loc);
14490 #ifdef ENABLE_FOLD_CHECKING
14491 md5_init_ctx (&ctx);
14492 fold_checksum_tree (op0, &ctx, ht);
14493 md5_finish_ctx (&ctx, checksum_after_op0);
14494 htab_empty (ht);
14496 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14497 fold_check_failed (op0, tem);
14499 md5_init_ctx (&ctx);
14500 fold_checksum_tree (op1, &ctx, ht);
14501 md5_finish_ctx (&ctx, checksum_after_op1);
14502 htab_delete (ht);
14504 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14505 fold_check_failed (op1, tem);
14506 #endif
14507 return tem;
14510 /* Fold a ternary tree expression with code CODE of type TYPE with
14511 operands OP0, OP1, and OP2. Return a folded expression if
14512 successful. Otherwise, return a tree expression with code CODE of
14513 type TYPE with operands OP0, OP1, and OP2. */
14515 tree
14516 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14517 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14519 tree tem;
14520 #ifdef ENABLE_FOLD_CHECKING
14521 unsigned char checksum_before_op0[16],
14522 checksum_before_op1[16],
14523 checksum_before_op2[16],
14524 checksum_after_op0[16],
14525 checksum_after_op1[16],
14526 checksum_after_op2[16];
14527 struct md5_ctx ctx;
14528 htab_t ht;
14530 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14531 md5_init_ctx (&ctx);
14532 fold_checksum_tree (op0, &ctx, ht);
14533 md5_finish_ctx (&ctx, checksum_before_op0);
14534 htab_empty (ht);
14536 md5_init_ctx (&ctx);
14537 fold_checksum_tree (op1, &ctx, ht);
14538 md5_finish_ctx (&ctx, checksum_before_op1);
14539 htab_empty (ht);
14541 md5_init_ctx (&ctx);
14542 fold_checksum_tree (op2, &ctx, ht);
14543 md5_finish_ctx (&ctx, checksum_before_op2);
14544 htab_empty (ht);
14545 #endif
14547 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14548 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14549 if (!tem)
14551 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14552 SET_EXPR_LOCATION (tem, loc);
14555 #ifdef ENABLE_FOLD_CHECKING
14556 md5_init_ctx (&ctx);
14557 fold_checksum_tree (op0, &ctx, ht);
14558 md5_finish_ctx (&ctx, checksum_after_op0);
14559 htab_empty (ht);
14561 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14562 fold_check_failed (op0, tem);
14564 md5_init_ctx (&ctx);
14565 fold_checksum_tree (op1, &ctx, ht);
14566 md5_finish_ctx (&ctx, checksum_after_op1);
14567 htab_empty (ht);
14569 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14570 fold_check_failed (op1, tem);
14572 md5_init_ctx (&ctx);
14573 fold_checksum_tree (op2, &ctx, ht);
14574 md5_finish_ctx (&ctx, checksum_after_op2);
14575 htab_delete (ht);
14577 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14578 fold_check_failed (op2, tem);
14579 #endif
14580 return tem;
14583 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14584 arguments in ARGARRAY, and a null static chain.
14585 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14586 of type TYPE from the given operands as constructed by build_call_array. */
14588 tree
14589 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14590 int nargs, tree *argarray)
14592 tree tem;
14593 #ifdef ENABLE_FOLD_CHECKING
14594 unsigned char checksum_before_fn[16],
14595 checksum_before_arglist[16],
14596 checksum_after_fn[16],
14597 checksum_after_arglist[16];
14598 struct md5_ctx ctx;
14599 htab_t ht;
14600 int i;
14602 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14603 md5_init_ctx (&ctx);
14604 fold_checksum_tree (fn, &ctx, ht);
14605 md5_finish_ctx (&ctx, checksum_before_fn);
14606 htab_empty (ht);
14608 md5_init_ctx (&ctx);
14609 for (i = 0; i < nargs; i++)
14610 fold_checksum_tree (argarray[i], &ctx, ht);
14611 md5_finish_ctx (&ctx, checksum_before_arglist);
14612 htab_empty (ht);
14613 #endif
14615 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14617 #ifdef ENABLE_FOLD_CHECKING
14618 md5_init_ctx (&ctx);
14619 fold_checksum_tree (fn, &ctx, ht);
14620 md5_finish_ctx (&ctx, checksum_after_fn);
14621 htab_empty (ht);
14623 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14624 fold_check_failed (fn, tem);
14626 md5_init_ctx (&ctx);
14627 for (i = 0; i < nargs; i++)
14628 fold_checksum_tree (argarray[i], &ctx, ht);
14629 md5_finish_ctx (&ctx, checksum_after_arglist);
14630 htab_delete (ht);
14632 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14633 fold_check_failed (NULL_TREE, tem);
14634 #endif
14635 return tem;
14638 /* Perform constant folding and related simplification of initializer
14639 expression EXPR. These behave identically to "fold_buildN" but ignore
14640 potential run-time traps and exceptions that fold must preserve. */
/* Save the FP/overflow strictness flags and install the relaxed
   settings used while folding static initializers.  Must be paired
   with END_FOLD_INIT in the same scope.  Note: expands to
   declarations followed by statements, so it must appear where
   declarations are still permitted.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

/* Restore the flags saved by START_FOLD_INIT.  */
#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
14661 tree
14662 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14663 tree type, tree op)
14665 tree result;
14666 START_FOLD_INIT;
14668 result = fold_build1_loc (loc, code, type, op);
14670 END_FOLD_INIT;
14671 return result;
14674 tree
14675 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14676 tree type, tree op0, tree op1)
14678 tree result;
14679 START_FOLD_INIT;
14681 result = fold_build2_loc (loc, code, type, op0, op1);
14683 END_FOLD_INIT;
14684 return result;
14687 tree
14688 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14689 tree type, tree op0, tree op1, tree op2)
14691 tree result;
14692 START_FOLD_INIT;
14694 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14696 END_FOLD_INIT;
14697 return result;
14700 tree
14701 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14702 int nargs, tree *argarray)
14704 tree result;
14705 START_FOLD_INIT;
14707 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14709 END_FOLD_INIT;
14710 return result;
14713 #undef START_FOLD_INIT
14714 #undef END_FOLD_INIT
14716 /* Determine if first argument is a multiple of second argument. Return 0 if
14717 it is not, or we cannot easily determine it to be.
14719 An example of the sort of thing we care about (at this point; this routine
14720 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14721 fold cases do now) is discovering that
14723 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14725 is a multiple of
14727 SAVE_EXPR (J * 8)
14729 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14731 This code also handles discovering that
14733 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14735 is a multiple of 8 so we don't have to worry about dealing with a
14736 possible remainder.
14738 Note that we *look* inside a SAVE_EXPR only to determine how it was
14739 calculated; it is not safe for fold to do much of anything else with the
14740 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14741 at run time. For example, the latter example above *cannot* be implemented
14742 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14743 evaluation time of the original SAVE_EXPR is not necessarily the same at
14744 the time the new expression is evaluated. The only optimization of this
14745 sort that would be valid is changing
14747 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14749 divided by 8 to
14751 SAVE_EXPR (I) * SAVE_EXPR (J)
14753 (where the same SAVE_EXPR (J) is used in the original and the
14754 transformed version). */
14757 multiple_of_p (tree type, const_tree top, const_tree bottom)
14759 if (operand_equal_p (top, bottom, 0))
14760 return 1;
14762 if (TREE_CODE (type) != INTEGER_TYPE)
14763 return 0;
14765 switch (TREE_CODE (top))
14767 case BIT_AND_EXPR:
14768 /* Bitwise and provides a power of two multiple. If the mask is
14769 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14770 if (!integer_pow2p (bottom))
14771 return 0;
14772 /* FALLTHRU */
14774 case MULT_EXPR:
14775 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14776 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14778 case PLUS_EXPR:
14779 case MINUS_EXPR:
14780 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14781 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14783 case LSHIFT_EXPR:
14784 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14786 tree op1, t1;
14788 op1 = TREE_OPERAND (top, 1);
14789 /* const_binop may not detect overflow correctly,
14790 so check for it explicitly here. */
14791 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14792 > TREE_INT_CST_LOW (op1)
14793 && TREE_INT_CST_HIGH (op1) == 0
14794 && 0 != (t1 = fold_convert (type,
14795 const_binop (LSHIFT_EXPR,
14796 size_one_node,
14797 op1, 0)))
14798 && !TREE_OVERFLOW (t1))
14799 return multiple_of_p (type, t1, bottom);
14801 return 0;
14803 case NOP_EXPR:
14804 /* Can't handle conversions from non-integral or wider integral type. */
14805 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14806 || (TYPE_PRECISION (type)
14807 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14808 return 0;
14810 /* .. fall through ... */
14812 case SAVE_EXPR:
14813 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14815 case INTEGER_CST:
14816 if (TREE_CODE (bottom) != INTEGER_CST
14817 || integer_zerop (bottom)
14818 || (TYPE_UNSIGNED (type)
14819 && (tree_int_cst_sgn (top) < 0
14820 || tree_int_cst_sgn (bottom) < 0)))
14821 return 0;
14822 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14823 top, bottom, 0));
14825 default:
14826 return 0;
14830 /* Return true if CODE or TYPE is known to be non-negative. */
14832 static bool
14833 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14835 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14836 && truth_value_p (code))
14837 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14838 have a signed:1 type (where the value is -1 and 0). */
14839 return true;
14840 return false;
14843 /* Return true if (CODE OP0) is known to be non-negative. If the return
14844 value is based on the assumption that signed overflow is undefined,
14845 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14846 *STRICT_OVERFLOW_P. */
14848 bool
14849 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14850 bool *strict_overflow_p)
14852 if (TYPE_UNSIGNED (type))
14853 return true;
14855 switch (code)
14857 case ABS_EXPR:
14858 /* We can't return 1 if flag_wrapv is set because
14859 ABS_EXPR<INT_MIN> = INT_MIN. */
14860 if (!INTEGRAL_TYPE_P (type))
14861 return true;
14862 if (TYPE_OVERFLOW_UNDEFINED (type))
14864 *strict_overflow_p = true;
14865 return true;
14867 break;
14869 case NON_LVALUE_EXPR:
14870 case FLOAT_EXPR:
14871 case FIX_TRUNC_EXPR:
14872 return tree_expr_nonnegative_warnv_p (op0,
14873 strict_overflow_p);
14875 case NOP_EXPR:
14877 tree inner_type = TREE_TYPE (op0);
14878 tree outer_type = type;
14880 if (TREE_CODE (outer_type) == REAL_TYPE)
14882 if (TREE_CODE (inner_type) == REAL_TYPE)
14883 return tree_expr_nonnegative_warnv_p (op0,
14884 strict_overflow_p);
14885 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14887 if (TYPE_UNSIGNED (inner_type))
14888 return true;
14889 return tree_expr_nonnegative_warnv_p (op0,
14890 strict_overflow_p);
14893 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14895 if (TREE_CODE (inner_type) == REAL_TYPE)
14896 return tree_expr_nonnegative_warnv_p (op0,
14897 strict_overflow_p);
14898 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14899 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14900 && TYPE_UNSIGNED (inner_type);
14903 break;
14905 default:
14906 return tree_simple_nonnegative_warnv_p (code, type);
14909 /* We don't know sign of `t', so be conservative and return false. */
14910 return false;
14913 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14914 value is based on the assumption that signed overflow is undefined,
14915 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14916 *STRICT_OVERFLOW_P. */
14918 bool
14919 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14920 tree op1, bool *strict_overflow_p)
14922 if (TYPE_UNSIGNED (type))
14923 return true;
14925 switch (code)
14927 case POINTER_PLUS_EXPR:
14928 case PLUS_EXPR:
14929 if (FLOAT_TYPE_P (type))
14930 return (tree_expr_nonnegative_warnv_p (op0,
14931 strict_overflow_p)
14932 && tree_expr_nonnegative_warnv_p (op1,
14933 strict_overflow_p));
14935 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14936 both unsigned and at least 2 bits shorter than the result. */
14937 if (TREE_CODE (type) == INTEGER_TYPE
14938 && TREE_CODE (op0) == NOP_EXPR
14939 && TREE_CODE (op1) == NOP_EXPR)
14941 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14942 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14943 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14944 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14946 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14947 TYPE_PRECISION (inner2)) + 1;
14948 return prec < TYPE_PRECISION (type);
14951 break;
14953 case MULT_EXPR:
14954 if (FLOAT_TYPE_P (type))
14956 /* x * x for floating point x is always non-negative. */
14957 if (operand_equal_p (op0, op1, 0))
14958 return true;
14959 return (tree_expr_nonnegative_warnv_p (op0,
14960 strict_overflow_p)
14961 && tree_expr_nonnegative_warnv_p (op1,
14962 strict_overflow_p));
14965 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14966 both unsigned and their total bits is shorter than the result. */
14967 if (TREE_CODE (type) == INTEGER_TYPE
14968 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14969 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14971 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14972 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14973 : TREE_TYPE (op0);
14974 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14975 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14976 : TREE_TYPE (op1);
14978 bool unsigned0 = TYPE_UNSIGNED (inner0);
14979 bool unsigned1 = TYPE_UNSIGNED (inner1);
14981 if (TREE_CODE (op0) == INTEGER_CST)
14982 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14984 if (TREE_CODE (op1) == INTEGER_CST)
14985 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14987 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14988 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14990 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14991 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14992 : TYPE_PRECISION (inner0);
14994 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14995 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14996 : TYPE_PRECISION (inner1);
14998 return precision0 + precision1 < TYPE_PRECISION (type);
15001 return false;
15003 case BIT_AND_EXPR:
15004 case MAX_EXPR:
15005 return (tree_expr_nonnegative_warnv_p (op0,
15006 strict_overflow_p)
15007 || tree_expr_nonnegative_warnv_p (op1,
15008 strict_overflow_p));
15010 case BIT_IOR_EXPR:
15011 case BIT_XOR_EXPR:
15012 case MIN_EXPR:
15013 case RDIV_EXPR:
15014 case TRUNC_DIV_EXPR:
15015 case CEIL_DIV_EXPR:
15016 case FLOOR_DIV_EXPR:
15017 case ROUND_DIV_EXPR:
15018 return (tree_expr_nonnegative_warnv_p (op0,
15019 strict_overflow_p)
15020 && tree_expr_nonnegative_warnv_p (op1,
15021 strict_overflow_p));
15023 case TRUNC_MOD_EXPR:
15024 case CEIL_MOD_EXPR:
15025 case FLOOR_MOD_EXPR:
15026 case ROUND_MOD_EXPR:
15027 return tree_expr_nonnegative_warnv_p (op0,
15028 strict_overflow_p);
15029 default:
15030 return tree_simple_nonnegative_warnv_p (code, type);
15033 /* We don't know sign of `t', so be conservative and return false. */
15034 return false;
15037 /* Return true if T is known to be non-negative. If the return
15038 value is based on the assumption that signed overflow is undefined,
15039 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15040 *STRICT_OVERFLOW_P. */
15042 bool
15043 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15045 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15046 return true;
15048 switch (TREE_CODE (t))
15050 case INTEGER_CST:
15051 return tree_int_cst_sgn (t) >= 0;
15053 case REAL_CST:
15054 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15056 case FIXED_CST:
15057 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15059 case COND_EXPR:
15060 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15061 strict_overflow_p)
15062 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15063 strict_overflow_p));
15064 default:
15065 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15066 TREE_TYPE (t));
15068 /* We don't know sign of `t', so be conservative and return false. */
15069 return false;
15072 /* Return true if T is known to be non-negative. If the return
15073 value is based on the assumption that signed overflow is undefined,
15074 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15075 *STRICT_OVERFLOW_P. */
15077 bool
15078 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15079 tree arg0, tree arg1, bool *strict_overflow_p)
15081 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15082 switch (DECL_FUNCTION_CODE (fndecl))
15084 CASE_FLT_FN (BUILT_IN_ACOS):
15085 CASE_FLT_FN (BUILT_IN_ACOSH):
15086 CASE_FLT_FN (BUILT_IN_CABS):
15087 CASE_FLT_FN (BUILT_IN_COSH):
15088 CASE_FLT_FN (BUILT_IN_ERFC):
15089 CASE_FLT_FN (BUILT_IN_EXP):
15090 CASE_FLT_FN (BUILT_IN_EXP10):
15091 CASE_FLT_FN (BUILT_IN_EXP2):
15092 CASE_FLT_FN (BUILT_IN_FABS):
15093 CASE_FLT_FN (BUILT_IN_FDIM):
15094 CASE_FLT_FN (BUILT_IN_HYPOT):
15095 CASE_FLT_FN (BUILT_IN_POW10):
15096 CASE_INT_FN (BUILT_IN_FFS):
15097 CASE_INT_FN (BUILT_IN_PARITY):
15098 CASE_INT_FN (BUILT_IN_POPCOUNT):
15099 case BUILT_IN_BSWAP32:
15100 case BUILT_IN_BSWAP64:
15101 /* Always true. */
15102 return true;
15104 CASE_FLT_FN (BUILT_IN_SQRT):
15105 /* sqrt(-0.0) is -0.0. */
15106 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15107 return true;
15108 return tree_expr_nonnegative_warnv_p (arg0,
15109 strict_overflow_p);
15111 CASE_FLT_FN (BUILT_IN_ASINH):
15112 CASE_FLT_FN (BUILT_IN_ATAN):
15113 CASE_FLT_FN (BUILT_IN_ATANH):
15114 CASE_FLT_FN (BUILT_IN_CBRT):
15115 CASE_FLT_FN (BUILT_IN_CEIL):
15116 CASE_FLT_FN (BUILT_IN_ERF):
15117 CASE_FLT_FN (BUILT_IN_EXPM1):
15118 CASE_FLT_FN (BUILT_IN_FLOOR):
15119 CASE_FLT_FN (BUILT_IN_FMOD):
15120 CASE_FLT_FN (BUILT_IN_FREXP):
15121 CASE_FLT_FN (BUILT_IN_LCEIL):
15122 CASE_FLT_FN (BUILT_IN_LDEXP):
15123 CASE_FLT_FN (BUILT_IN_LFLOOR):
15124 CASE_FLT_FN (BUILT_IN_LLCEIL):
15125 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15126 CASE_FLT_FN (BUILT_IN_LLRINT):
15127 CASE_FLT_FN (BUILT_IN_LLROUND):
15128 CASE_FLT_FN (BUILT_IN_LRINT):
15129 CASE_FLT_FN (BUILT_IN_LROUND):
15130 CASE_FLT_FN (BUILT_IN_MODF):
15131 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15132 CASE_FLT_FN (BUILT_IN_RINT):
15133 CASE_FLT_FN (BUILT_IN_ROUND):
15134 CASE_FLT_FN (BUILT_IN_SCALB):
15135 CASE_FLT_FN (BUILT_IN_SCALBLN):
15136 CASE_FLT_FN (BUILT_IN_SCALBN):
15137 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15138 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15139 CASE_FLT_FN (BUILT_IN_SINH):
15140 CASE_FLT_FN (BUILT_IN_TANH):
15141 CASE_FLT_FN (BUILT_IN_TRUNC):
15142 /* True if the 1st argument is nonnegative. */
15143 return tree_expr_nonnegative_warnv_p (arg0,
15144 strict_overflow_p);
15146 CASE_FLT_FN (BUILT_IN_FMAX):
15147 /* True if the 1st OR 2nd arguments are nonnegative. */
15148 return (tree_expr_nonnegative_warnv_p (arg0,
15149 strict_overflow_p)
15150 || (tree_expr_nonnegative_warnv_p (arg1,
15151 strict_overflow_p)));
15153 CASE_FLT_FN (BUILT_IN_FMIN):
15154 /* True if the 1st AND 2nd arguments are nonnegative. */
15155 return (tree_expr_nonnegative_warnv_p (arg0,
15156 strict_overflow_p)
15157 && (tree_expr_nonnegative_warnv_p (arg1,
15158 strict_overflow_p)));
15160 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15161 /* True if the 2nd argument is nonnegative. */
15162 return tree_expr_nonnegative_warnv_p (arg1,
15163 strict_overflow_p);
15165 CASE_FLT_FN (BUILT_IN_POWI):
15166 /* True if the 1st argument is nonnegative or the second
15167 argument is an even integer. */
15168 if (TREE_CODE (arg1) == INTEGER_CST
15169 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15170 return true;
15171 return tree_expr_nonnegative_warnv_p (arg0,
15172 strict_overflow_p);
15174 CASE_FLT_FN (BUILT_IN_POW):
15175 /* True if the 1st argument is nonnegative or the second
15176 argument is an even integer valued real. */
15177 if (TREE_CODE (arg1) == REAL_CST)
15179 REAL_VALUE_TYPE c;
15180 HOST_WIDE_INT n;
15182 c = TREE_REAL_CST (arg1);
15183 n = real_to_integer (&c);
15184 if ((n & 1) == 0)
15186 REAL_VALUE_TYPE cint;
15187 real_from_integer (&cint, VOIDmode, n,
15188 n < 0 ? -1 : 0, 0);
15189 if (real_identical (&c, &cint))
15190 return true;
15193 return tree_expr_nonnegative_warnv_p (arg0,
15194 strict_overflow_p);
15196 default:
15197 break;
15199 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15200 type);
15203 /* Return true if T is known to be non-negative. If the return
15204 value is based on the assumption that signed overflow is undefined,
15205 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15206 *STRICT_OVERFLOW_P. */
15208 bool
15209 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15211 enum tree_code code = TREE_CODE (t);
15212 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15213 return true;
15215 switch (code)
15217 case TARGET_EXPR:
15219 tree temp = TARGET_EXPR_SLOT (t);
15220 t = TARGET_EXPR_INITIAL (t);
15222 /* If the initializer is non-void, then it's a normal expression
15223 that will be assigned to the slot. */
15224 if (!VOID_TYPE_P (t))
15225 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15227 /* Otherwise, the initializer sets the slot in some way. One common
15228 way is an assignment statement at the end of the initializer. */
15229 while (1)
15231 if (TREE_CODE (t) == BIND_EXPR)
15232 t = expr_last (BIND_EXPR_BODY (t));
15233 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15234 || TREE_CODE (t) == TRY_CATCH_EXPR)
15235 t = expr_last (TREE_OPERAND (t, 0));
15236 else if (TREE_CODE (t) == STATEMENT_LIST)
15237 t = expr_last (t);
15238 else
15239 break;
15241 if (TREE_CODE (t) == MODIFY_EXPR
15242 && TREE_OPERAND (t, 0) == temp)
15243 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15244 strict_overflow_p);
15246 return false;
15249 case CALL_EXPR:
15251 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15252 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15254 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15255 get_callee_fndecl (t),
15256 arg0,
15257 arg1,
15258 strict_overflow_p);
15260 case COMPOUND_EXPR:
15261 case MODIFY_EXPR:
15262 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15263 strict_overflow_p);
15264 case BIND_EXPR:
15265 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15266 strict_overflow_p);
15267 case SAVE_EXPR:
15268 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15269 strict_overflow_p);
15271 default:
15272 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15273 TREE_TYPE (t));
15276 /* We don't know sign of `t', so be conservative and return false. */
15277 return false;
15280 /* Return true if T is known to be non-negative. If the return
15281 value is based on the assumption that signed overflow is undefined,
15282 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15283 *STRICT_OVERFLOW_P. */
15285 bool
15286 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15288 enum tree_code code;
15289 if (t == error_mark_node)
15290 return false;
15292 code = TREE_CODE (t);
15293 switch (TREE_CODE_CLASS (code))
15295 case tcc_binary:
15296 case tcc_comparison:
15297 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15298 TREE_TYPE (t),
15299 TREE_OPERAND (t, 0),
15300 TREE_OPERAND (t, 1),
15301 strict_overflow_p);
15303 case tcc_unary:
15304 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15305 TREE_TYPE (t),
15306 TREE_OPERAND (t, 0),
15307 strict_overflow_p);
15309 case tcc_constant:
15310 case tcc_declaration:
15311 case tcc_reference:
15312 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15314 default:
15315 break;
15318 switch (code)
15320 case TRUTH_AND_EXPR:
15321 case TRUTH_OR_EXPR:
15322 case TRUTH_XOR_EXPR:
15323 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15324 TREE_TYPE (t),
15325 TREE_OPERAND (t, 0),
15326 TREE_OPERAND (t, 1),
15327 strict_overflow_p);
15328 case TRUTH_NOT_EXPR:
15329 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15330 TREE_TYPE (t),
15331 TREE_OPERAND (t, 0),
15332 strict_overflow_p);
15334 case COND_EXPR:
15335 case CONSTRUCTOR:
15336 case OBJ_TYPE_REF:
15337 case ASSERT_EXPR:
15338 case ADDR_EXPR:
15339 case WITH_SIZE_EXPR:
15340 case SSA_NAME:
15341 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15343 default:
15344 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15348 /* Return true if `t' is known to be non-negative. Handle warnings
15349 about undefined signed overflow. */
15351 bool
15352 tree_expr_nonnegative_p (tree t)
15354 bool ret, strict_overflow_p;
15356 strict_overflow_p = false;
15357 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15358 if (strict_overflow_p)
15359 fold_overflow_warning (("assuming signed overflow does not occur when "
15360 "determining that expression is always "
15361 "non-negative"),
15362 WARN_STRICT_OVERFLOW_MISC);
15363 return ret;
15367 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15368 For floating point we further ensure that T is not denormal.
15369 Similar logic is present in nonzero_address in rtlanal.h.
15371 If the return value is based on the assumption that signed overflow
15372 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15373 change *STRICT_OVERFLOW_P. */
15375 bool
15376 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15377 bool *strict_overflow_p)
15379 switch (code)
15381 case ABS_EXPR:
15382 return tree_expr_nonzero_warnv_p (op0,
15383 strict_overflow_p);
15385 case NOP_EXPR:
15387 tree inner_type = TREE_TYPE (op0);
15388 tree outer_type = type;
15390 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15391 && tree_expr_nonzero_warnv_p (op0,
15392 strict_overflow_p));
15394 break;
15396 case NON_LVALUE_EXPR:
15397 return tree_expr_nonzero_warnv_p (op0,
15398 strict_overflow_p);
15400 default:
15401 break;
15404 return false;
15407 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15408 For floating point we further ensure that T is not denormal.
15409 Similar logic is present in nonzero_address in rtlanal.h.
15411 If the return value is based on the assumption that signed overflow
15412 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15413 change *STRICT_OVERFLOW_P. */
15415 bool
15416 tree_binary_nonzero_warnv_p (enum tree_code code,
15417 tree type,
15418 tree op0,
15419 tree op1, bool *strict_overflow_p)
15421 bool sub_strict_overflow_p;
15422 switch (code)
15424 case POINTER_PLUS_EXPR:
15425 case PLUS_EXPR:
15426 if (TYPE_OVERFLOW_UNDEFINED (type))
15428 /* With the presence of negative values it is hard
15429 to say something. */
15430 sub_strict_overflow_p = false;
15431 if (!tree_expr_nonnegative_warnv_p (op0,
15432 &sub_strict_overflow_p)
15433 || !tree_expr_nonnegative_warnv_p (op1,
15434 &sub_strict_overflow_p))
15435 return false;
15436 /* One of operands must be positive and the other non-negative. */
15437 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15438 overflows, on a twos-complement machine the sum of two
15439 nonnegative numbers can never be zero. */
15440 return (tree_expr_nonzero_warnv_p (op0,
15441 strict_overflow_p)
15442 || tree_expr_nonzero_warnv_p (op1,
15443 strict_overflow_p));
15445 break;
15447 case MULT_EXPR:
15448 if (TYPE_OVERFLOW_UNDEFINED (type))
15450 if (tree_expr_nonzero_warnv_p (op0,
15451 strict_overflow_p)
15452 && tree_expr_nonzero_warnv_p (op1,
15453 strict_overflow_p))
15455 *strict_overflow_p = true;
15456 return true;
15459 break;
15461 case MIN_EXPR:
15462 sub_strict_overflow_p = false;
15463 if (tree_expr_nonzero_warnv_p (op0,
15464 &sub_strict_overflow_p)
15465 && tree_expr_nonzero_warnv_p (op1,
15466 &sub_strict_overflow_p))
15468 if (sub_strict_overflow_p)
15469 *strict_overflow_p = true;
15471 break;
15473 case MAX_EXPR:
15474 sub_strict_overflow_p = false;
15475 if (tree_expr_nonzero_warnv_p (op0,
15476 &sub_strict_overflow_p))
15478 if (sub_strict_overflow_p)
15479 *strict_overflow_p = true;
15481 /* When both operands are nonzero, then MAX must be too. */
15482 if (tree_expr_nonzero_warnv_p (op1,
15483 strict_overflow_p))
15484 return true;
15486 /* MAX where operand 0 is positive is positive. */
15487 return tree_expr_nonnegative_warnv_p (op0,
15488 strict_overflow_p);
15490 /* MAX where operand 1 is positive is positive. */
15491 else if (tree_expr_nonzero_warnv_p (op1,
15492 &sub_strict_overflow_p)
15493 && tree_expr_nonnegative_warnv_p (op1,
15494 &sub_strict_overflow_p))
15496 if (sub_strict_overflow_p)
15497 *strict_overflow_p = true;
15498 return true;
15500 break;
15502 case BIT_IOR_EXPR:
15503 return (tree_expr_nonzero_warnv_p (op1,
15504 strict_overflow_p)
15505 || tree_expr_nonzero_warnv_p (op0,
15506 strict_overflow_p));
15508 default:
15509 break;
15512 return false;
15515 /* Return true when T is an address and is known to be nonzero.
15516 For floating point we further ensure that T is not denormal.
15517 Similar logic is present in nonzero_address in rtlanal.h.
15519 If the return value is based on the assumption that signed overflow
15520 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15521 change *STRICT_OVERFLOW_P. */
15523 bool
15524 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15526 bool sub_strict_overflow_p;
15527 switch (TREE_CODE (t))
15529 case INTEGER_CST:
15530 return !integer_zerop (t);
15532 case ADDR_EXPR:
15534 tree base = get_base_address (TREE_OPERAND (t, 0));
15536 if (!base)
15537 return false;
15539 /* Weak declarations may link to NULL. Other things may also be NULL
15540 so protect with -fdelete-null-pointer-checks; but not variables
15541 allocated on the stack. */
15542 if (DECL_P (base)
15543 && (flag_delete_null_pointer_checks
15544 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15545 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15547 /* Constants are never weak. */
15548 if (CONSTANT_CLASS_P (base))
15549 return true;
15551 return false;
15554 case COND_EXPR:
15555 sub_strict_overflow_p = false;
15556 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15557 &sub_strict_overflow_p)
15558 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15559 &sub_strict_overflow_p))
15561 if (sub_strict_overflow_p)
15562 *strict_overflow_p = true;
15563 return true;
15565 break;
15567 default:
15568 break;
15570 return false;
15573 /* Return true when T is an address and is known to be nonzero.
15574 For floating point we further ensure that T is not denormal.
15575 Similar logic is present in nonzero_address in rtlanal.h.
15577 If the return value is based on the assumption that signed overflow
15578 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15579 change *STRICT_OVERFLOW_P. */
15581 bool
15582 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15584 tree type = TREE_TYPE (t);
15585 enum tree_code code;
15587 /* Doing something useful for floating point would need more work. */
15588 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15589 return false;
15591 code = TREE_CODE (t);
15592 switch (TREE_CODE_CLASS (code))
15594 case tcc_unary:
15595 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15596 strict_overflow_p);
15597 case tcc_binary:
15598 case tcc_comparison:
15599 return tree_binary_nonzero_warnv_p (code, type,
15600 TREE_OPERAND (t, 0),
15601 TREE_OPERAND (t, 1),
15602 strict_overflow_p);
15603 case tcc_constant:
15604 case tcc_declaration:
15605 case tcc_reference:
15606 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15608 default:
15609 break;
15612 switch (code)
15614 case TRUTH_NOT_EXPR:
15615 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15616 strict_overflow_p);
15618 case TRUTH_AND_EXPR:
15619 case TRUTH_OR_EXPR:
15620 case TRUTH_XOR_EXPR:
15621 return tree_binary_nonzero_warnv_p (code, type,
15622 TREE_OPERAND (t, 0),
15623 TREE_OPERAND (t, 1),
15624 strict_overflow_p);
15626 case COND_EXPR:
15627 case CONSTRUCTOR:
15628 case OBJ_TYPE_REF:
15629 case ASSERT_EXPR:
15630 case ADDR_EXPR:
15631 case WITH_SIZE_EXPR:
15632 case SSA_NAME:
15633 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15635 case COMPOUND_EXPR:
15636 case MODIFY_EXPR:
15637 case BIND_EXPR:
15638 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15639 strict_overflow_p);
15641 case SAVE_EXPR:
15642 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15643 strict_overflow_p);
15645 case CALL_EXPR:
15646 return alloca_call_p (t);
15648 default:
15649 break;
15651 return false;
15654 /* Return true when T is an address and is known to be nonzero.
15655 Handle warnings about undefined signed overflow. */
15657 bool
15658 tree_expr_nonzero_p (tree t)
15660 bool ret, strict_overflow_p;
15662 strict_overflow_p = false;
15663 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15664 if (strict_overflow_p)
15665 fold_overflow_warning (("assuming signed overflow does not occur when "
15666 "determining that expression is always "
15667 "non-zero"),
15668 WARN_STRICT_OVERFLOW_MISC);
15669 return ret;
15672 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15673 attempt to fold the expression to a constant without modifying TYPE,
15674 OP0 or OP1.
15676 If the expression could be simplified to a constant, then return
15677 the constant. If the expression would not be simplified to a
15678 constant, then return NULL_TREE. */
15680 tree
15681 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15683 tree tem = fold_binary (code, type, op0, op1);
15684 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15687 /* Given the components of a unary expression CODE, TYPE and OP0,
15688 attempt to fold the expression to a constant without modifying
15689 TYPE or OP0.
15691 If the expression could be simplified to a constant, then return
15692 the constant. If the expression would not be simplified to a
15693 constant, then return NULL_TREE. */
15695 tree
15696 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15698 tree tem = fold_unary (code, type, op0);
15699 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15702 /* If EXP represents referencing an element in a constant string
15703 (either via pointer arithmetic or array indexing), return the
15704 tree representing the value accessed, otherwise return NULL. */
15706 tree
15707 fold_read_from_constant_string (tree exp)
15709 if ((TREE_CODE (exp) == INDIRECT_REF
15710 || TREE_CODE (exp) == ARRAY_REF)
15711 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15713 tree exp1 = TREE_OPERAND (exp, 0);
15714 tree index;
15715 tree string;
15716 location_t loc = EXPR_LOCATION (exp);
15718 if (TREE_CODE (exp) == INDIRECT_REF)
15719 string = string_constant (exp1, &index);
15720 else
15722 tree low_bound = array_ref_low_bound (exp);
15723 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15725 /* Optimize the special-case of a zero lower bound.
15727 We convert the low_bound to sizetype to avoid some problems
15728 with constant folding. (E.g. suppose the lower bound is 1,
15729 and its mode is QI. Without the conversion,l (ARRAY
15730 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15731 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15732 if (! integer_zerop (low_bound))
15733 index = size_diffop_loc (loc, index,
15734 fold_convert_loc (loc, sizetype, low_bound));
15736 string = exp1;
15739 if (string
15740 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15741 && TREE_CODE (string) == STRING_CST
15742 && TREE_CODE (index) == INTEGER_CST
15743 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15744 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15745 == MODE_INT)
15746 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15747 return build_int_cst_type (TREE_TYPE (exp),
15748 (TREE_STRING_POINTER (string)
15749 [TREE_INT_CST_LOW (index)]));
15751 return NULL;
15754 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15755 an integer constant, real, or fixed-point constant.
15757 TYPE is the type of the result. */
15759 static tree
15760 fold_negate_const (tree arg0, tree type)
15762 tree t = NULL_TREE;
15764 switch (TREE_CODE (arg0))
15766 case INTEGER_CST:
15768 unsigned HOST_WIDE_INT low;
15769 HOST_WIDE_INT high;
15770 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15771 TREE_INT_CST_HIGH (arg0),
15772 &low, &high);
15773 t = force_fit_type_double (type, low, high, 1,
15774 (overflow | TREE_OVERFLOW (arg0))
15775 && !TYPE_UNSIGNED (type));
15776 break;
15779 case REAL_CST:
15780 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15781 break;
15783 case FIXED_CST:
15785 FIXED_VALUE_TYPE f;
15786 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15787 &(TREE_FIXED_CST (arg0)), NULL,
15788 TYPE_SATURATING (type));
15789 t = build_fixed (type, f);
15790 /* Propagate overflow flags. */
15791 if (overflow_p | TREE_OVERFLOW (arg0))
15792 TREE_OVERFLOW (t) = 1;
15793 break;
15796 default:
15797 gcc_unreachable ();
15800 return t;
15803 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15804 an integer constant or real constant.
15806 TYPE is the type of the result. */
15808 tree
15809 fold_abs_const (tree arg0, tree type)
15811 tree t = NULL_TREE;
15813 switch (TREE_CODE (arg0))
15815 case INTEGER_CST:
15816 /* If the value is unsigned, then the absolute value is
15817 the same as the ordinary value. */
15818 if (TYPE_UNSIGNED (type))
15819 t = arg0;
15820 /* Similarly, if the value is non-negative. */
15821 else if (INT_CST_LT (integer_minus_one_node, arg0))
15822 t = arg0;
15823 /* If the value is negative, then the absolute value is
15824 its negation. */
15825 else
15827 unsigned HOST_WIDE_INT low;
15828 HOST_WIDE_INT high;
15829 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15830 TREE_INT_CST_HIGH (arg0),
15831 &low, &high);
15832 t = force_fit_type_double (type, low, high, -1,
15833 overflow | TREE_OVERFLOW (arg0));
15835 break;
15837 case REAL_CST:
15838 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15839 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15840 else
15841 t = arg0;
15842 break;
15844 default:
15845 gcc_unreachable ();
15848 return t;
15851 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15852 constant. TYPE is the type of the result. */
15854 static tree
15855 fold_not_const (tree arg0, tree type)
15857 tree t = NULL_TREE;
15859 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15861 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15862 ~TREE_INT_CST_HIGH (arg0), 0,
15863 TREE_OVERFLOW (arg0));
15865 return t;
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    /* A NaN compares unordered, so EQ and ORDERED are false.  */
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    /* ... and NE plus all the unordered comparisons are true.  */
	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      /* These comparisons raise an invalid-operation exception on
		 a NaN operand; folding them away would lose the trap, so
		 punt when -ftrapping-math is in effect.  */
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      /* Fold the real and imaginary parts separately, then combine:
	 equal iff both parts equal, unequal iff either part differs.  */
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
15996 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15997 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15998 itself. */
16000 tree
16001 fold_build_cleanup_point_expr (tree type, tree expr)
16003 /* If the expression does not have side effects then we don't have to wrap
16004 it with a cleanup point expression. */
16005 if (!TREE_SIDE_EFFECTS (expr))
16006 return expr;
16008 /* If the expression is a return, check to see if the expression inside the
16009 return has no side effects or the right hand side of the modify expression
16010 inside the return. If either don't have side effects set we don't need to
16011 wrap the expression in a cleanup point expression. Note we don't check the
16012 left hand side of the modify because it should always be a return decl. */
16013 if (TREE_CODE (expr) == RETURN_EXPR)
16015 tree op = TREE_OPERAND (expr, 0);
16016 if (!op || !TREE_SIDE_EFFECTS (op))
16017 return expr;
16018 op = TREE_OPERAND (op, 1);
16019 if (!TREE_SIDE_EFFECTS (op))
16020 return expr;
16023 return build1 (CLEANUP_POINT_EXPR, type, expr);
16026 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16027 of an indirection through OP0, or NULL_TREE if no simplification is
16028 possible. */
16030 tree
16031 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16033 tree sub = op0;
16034 tree subtype;
16036 STRIP_NOPS (sub);
16037 subtype = TREE_TYPE (sub);
16038 if (!POINTER_TYPE_P (subtype))
16039 return NULL_TREE;
16041 if (TREE_CODE (sub) == ADDR_EXPR)
16043 tree op = TREE_OPERAND (sub, 0);
16044 tree optype = TREE_TYPE (op);
16045 /* *&CONST_DECL -> to the value of the const decl. */
16046 if (TREE_CODE (op) == CONST_DECL)
16047 return DECL_INITIAL (op);
16048 /* *&p => p; make sure to handle *&"str"[cst] here. */
16049 if (type == optype)
16051 tree fop = fold_read_from_constant_string (op);
16052 if (fop)
16053 return fop;
16054 else
16055 return op;
16057 /* *(foo *)&fooarray => fooarray[0] */
16058 else if (TREE_CODE (optype) == ARRAY_TYPE
16059 && type == TREE_TYPE (optype))
16061 tree type_domain = TYPE_DOMAIN (optype);
16062 tree min_val = size_zero_node;
16063 if (type_domain && TYPE_MIN_VALUE (type_domain))
16064 min_val = TYPE_MIN_VALUE (type_domain);
16065 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
16066 SET_EXPR_LOCATION (op0, loc);
16067 return op0;
16069 /* *(foo *)&complexfoo => __real__ complexfoo */
16070 else if (TREE_CODE (optype) == COMPLEX_TYPE
16071 && type == TREE_TYPE (optype))
16072 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16073 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16074 else if (TREE_CODE (optype) == VECTOR_TYPE
16075 && type == TREE_TYPE (optype))
16077 tree part_width = TYPE_SIZE (type);
16078 tree index = bitsize_int (0);
16079 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16083 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16084 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16085 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16087 tree op00 = TREE_OPERAND (sub, 0);
16088 tree op01 = TREE_OPERAND (sub, 1);
16089 tree op00type;
16091 STRIP_NOPS (op00);
16092 op00type = TREE_TYPE (op00);
16093 if (TREE_CODE (op00) == ADDR_EXPR
16094 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
16095 && type == TREE_TYPE (TREE_TYPE (op00type)))
16097 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16098 tree part_width = TYPE_SIZE (type);
16099 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16100 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16101 tree index = bitsize_int (indexi);
16103 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
16104 return fold_build3_loc (loc,
16105 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
16106 part_width, index);
16112 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16113 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16114 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16116 tree op00 = TREE_OPERAND (sub, 0);
16117 tree op01 = TREE_OPERAND (sub, 1);
16118 tree op00type;
16120 STRIP_NOPS (op00);
16121 op00type = TREE_TYPE (op00);
16122 if (TREE_CODE (op00) == ADDR_EXPR
16123 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
16124 && type == TREE_TYPE (TREE_TYPE (op00type)))
16126 tree size = TYPE_SIZE_UNIT (type);
16127 if (tree_int_cst_equal (size, op01))
16128 return fold_build1_loc (loc, IMAGPART_EXPR, type,
16129 TREE_OPERAND (op00, 0));
16133 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16134 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16135 && type == TREE_TYPE (TREE_TYPE (subtype)))
16137 tree type_domain;
16138 tree min_val = size_zero_node;
16139 sub = build_fold_indirect_ref_loc (loc, sub);
16140 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16141 if (type_domain && TYPE_MIN_VALUE (type_domain))
16142 min_val = TYPE_MIN_VALUE (type_domain);
16143 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
16144 SET_EXPR_LOCATION (op0, loc);
16145 return op0;
16148 return NULL_TREE;
16151 /* Builds an expression for an indirection through T, simplifying some
16152 cases. */
16154 tree
16155 build_fold_indirect_ref_loc (location_t loc, tree t)
16157 tree type = TREE_TYPE (TREE_TYPE (t));
16158 tree sub = fold_indirect_ref_1 (loc, type, t);
16160 if (sub)
16161 return sub;
16163 t = build1 (INDIRECT_REF, type, t);
16164 SET_EXPR_LOCATION (t, loc);
16165 return t;
16168 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16170 tree
16171 fold_indirect_ref_loc (location_t loc, tree t)
16173 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16175 if (sub)
16176 return sub;
16177 else
16178 return t;
16181 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16182 whose result is ignored. The type of the returned tree need not be
16183 the same as the original expression. */
16185 tree
16186 fold_ignored_result (tree t)
16188 if (!TREE_SIDE_EFFECTS (t))
16189 return integer_zero_node;
16191 for (;;)
16192 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16194 case tcc_unary:
16195 t = TREE_OPERAND (t, 0);
16196 break;
16198 case tcc_binary:
16199 case tcc_comparison:
16200 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16201 t = TREE_OPERAND (t, 0);
16202 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16203 t = TREE_OPERAND (t, 1);
16204 else
16205 return t;
16206 break;
16208 case tcc_expression:
16209 switch (TREE_CODE (t))
16211 case COMPOUND_EXPR:
16212 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16213 return t;
16214 t = TREE_OPERAND (t, 0);
16215 break;
16217 case COND_EXPR:
16218 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16219 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16220 return t;
16221 t = TREE_OPERAND (t, 0);
16222 break;
16224 default:
16225 return t;
16227 break;
16229 default:
16230 return t;
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  /* Fold the round-up at compile time on the double-int
	     representation, tracking overflow across the low/high
	     halves.  */
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  /* Clear the low bits and step up to the next multiple;
	     ~(divisor - 1) sign-extends to -divisor since DIVISOR is a
	     power of two.  */
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      /* Carry out of the low half; if the high half wraps as
		 well, the rounded value overflowed.  */
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  /* Build (value + (divisor - 1)) & -divisor.  */
	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      /* General case: CEIL (value / divisor) * divisor.  */
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
16305 /* Likewise, but round down. */
16307 tree
16308 round_down_loc (location_t loc, tree value, int divisor)
16310 tree div = NULL_TREE;
16312 gcc_assert (divisor > 0);
16313 if (divisor == 1)
16314 return value;
16316 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16317 have to do anything. Only do this when we are not given a const,
16318 because in that case, this check is more expensive than just
16319 doing it. */
16320 if (TREE_CODE (value) != INTEGER_CST)
16322 div = build_int_cst (TREE_TYPE (value), divisor);
16324 if (multiple_of_p (TREE_TYPE (value), value, div))
16325 return value;
16328 /* If divisor is a power of two, simplify this to bit manipulation. */
16329 if (divisor == (divisor & -divisor))
16331 tree t;
16333 t = build_int_cst (TREE_TYPE (value), -divisor);
16334 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16336 else
16338 if (!div)
16339 div = build_int_cst (TREE_TYPE (value), divisor);
16340 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16341 value = size_binop_loc (loc, MULT_EXPR, value, div);
16344 return value;
16347 /* Returns the pointer to the base of the object addressed by EXP and
16348 extracts the information about the offset of the access, storing it
16349 to PBITPOS and POFFSET. */
16351 static tree
16352 split_address_to_core_and_offset (tree exp,
16353 HOST_WIDE_INT *pbitpos, tree *poffset)
16355 tree core;
16356 enum machine_mode mode;
16357 int unsignedp, volatilep;
16358 HOST_WIDE_INT bitsize;
16359 location_t loc = EXPR_LOCATION (exp);
16361 if (TREE_CODE (exp) == ADDR_EXPR)
16363 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16364 poffset, &mode, &unsignedp, &volatilep,
16365 false);
16366 core = build_fold_addr_expr_loc (loc, core);
16368 else
16370 core = exp;
16371 *pbitpos = 0;
16372 *poffset = NULL_TREE;
16375 return core;
16378 /* Returns true if addresses of E1 and E2 differ by a constant, false
16379 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16381 bool
16382 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16384 tree core1, core2;
16385 HOST_WIDE_INT bitpos1, bitpos2;
16386 tree toffset1, toffset2, tdiff, type;
16388 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16389 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16391 if (bitpos1 % BITS_PER_UNIT != 0
16392 || bitpos2 % BITS_PER_UNIT != 0
16393 || !operand_equal_p (core1, core2, 0))
16394 return false;
16396 if (toffset1 && toffset2)
16398 type = TREE_TYPE (toffset1);
16399 if (type != TREE_TYPE (toffset2))
16400 toffset2 = fold_convert (type, toffset2);
16402 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16403 if (!cst_and_fits_in_hwi (tdiff))
16404 return false;
16406 *diff = int_cst_value (tdiff);
16408 else if (toffset1 || toffset2)
16410 /* If only one of the offsets is non-constant, the difference cannot
16411 be a constant. */
16412 return false;
16414 else
16415 *diff = 0;
16417 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16418 return true;
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      /* The sign operation itself can be dropped entirely; recurse in
	 case the operand also contains sign operations.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      /* With sign-dependent rounding, flipping operand signs can change
	 the magnitude of the result, so leave the expression alone.  */
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      /* Strip sign operations from either factor; rebuild only if at
	 least one operand was simplified.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      /* Only the value operand (operand 1) determines the result's
	 sign; operand 0 is kept for its side effects.  */
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      /* Strip sign operations from both arms of the conditional.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
			    COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions,
	       i.e. those where f(-x) == -f(x) so the argument's sign
	       only affects the result's sign.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}