/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
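
/* Illustrative note (not part of the original file): the four bits of
   the encoding stand for LT (1), EQ (2), GT (4) and UNORD (8), so each
   composite code is the OR of its outcomes, e.g.

     COMPCODE_LE = COMPCODE_LT | COMPCODE_EQ            = 3
     COMPCODE_NE = COMPCODE_LT | COMPCODE_GT | UNORD    = 13

   which is why ANDing or ORing two comparison codes yields the code of
   the conjunction or disjunction of the comparisons.  */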
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
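
/* Illustrative sketch (not part of the original file): with 32-bit
   HOST_WIDE_INTs, adding two large positive values wraps negative and
   the macro catches it:

     a = 0x7fffffff, b = 1, sum = a + b = 0x80000000 (negative).
     ~(a ^ b)  has the sign bit set, since a and b share a sign;
     (a ^ sum) has the sign bit set, since a and sum differ in sign;
     so the AND is negative and the macro yields nonzero: overflow.

   When a and b have opposite signs, ~(a ^ b) clears the sign bit and
   the macro yields 0 -- mixed-sign addition can never overflow.  */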

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
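
/* Illustrative sketch (not part of the original file): with a 64-bit
   HOST_WIDE_INT, BASE is 2^32 and each of the 4 array words holds one
   32-bit "digit":

     x = 0x123456789abcdef0
     LOWPART (x)  = 0x9abcdef0
     HIGHPART (x) = 0x12345678
     x == LOWPART (x) + HIGHPART (x) * BASE

   Keeping only half a word per digit lets the multiply loop below
   accumulate a digit product plus carries in a full HOST_WIDE_INT
   without overflowing.  */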

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
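
/* Illustrative sketch (not part of the original file): encode and decode
   are exact inverses, so a two-word constant survives a round trip
   unchanged:

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo2;
     HOST_WIDE_INT hi2;
     encode (words, lo, hi);
     decode (words, &lo2, &hi2);
     gcc_assert (lo2 == lo && hi2 == hi);  */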

/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec = TYPE_PRECISION (type);
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
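
/* Illustrative sketch (not part of the original file): forcing the value
   511 into a signed 8-bit type keeps only the low 8 bits (0xff) and then
   sign extends, yielding -1 in *LV/*HV; since argument and result
   differ, the function returns nonzero to signal overflow.  */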

/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
                       + (unsigned HOST_WIDE_INT) h2
                       + (l < l1));

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
            || (h == h1
                && l < l1));
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
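
/* Illustrative sketch (not part of the original file): the low words are
   added with wraparound and (l < l1) recovers the carry, because the
   unsigned sum l = l1 + l2 wrapped iff it came out smaller than an
   operand.  E.g. with 32-bit words, l1 = 0xffffffff and l2 = 2 give
   l = 1 < l1, so a carry of 1 is folded into the high-word sum.  */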

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
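
/* Illustrative sketch (not part of the original file): for a nonzero low
   word, -X == ~X + 1 and the +1 cannot carry out of the low word, so the
   high word is simply complemented and no overflow is possible.  When
   the low word is 0 the result is -H1:0, and (*hv & h1) < 0 catches the
   one overflowing case -- negating the most negative value, where *hv
   equals h1 with the sign bit set.  */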

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
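
/* Illustrative sketch (not part of the original file): this is schoolbook
   multiplication in base 2^(HOST_BITS_PER_WIDE_INT/2): digit i of arg1
   times digit j of arg2 contributes to digit i+j of the 8-digit product.
   The in-range constants in the comments above assume 32-bit words
   (16-bit digits), where a digit product is at most
   0xFFFF * 0xFFFF = 0xFFFE0001.  */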

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
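
/* Illustrative note (not part of the original file): the expression
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 is split into two
   shifts so that count == 0 never produces a shift by the full word
   width, which C leaves undefined; together the pair shifts right by
   HOST_BITS_PER_WIDE_INT - count, moving the top COUNT bits of the low
   word into the bottom of the high word.  */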

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
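
/* Illustrative note (not part of the original file): both rotates use
   the usual identity rot(x, n) == (x << n) | (x >> (prec - n)),
   composed from the PREC-bit logical shift helpers above; e.g. rotating
   the 8-bit value 0b10000001 left by 1 yields 0b00000011.  */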

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
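
/* Illustrative sketch (not part of the original file): dividing -7 by 2
   under the different codes gives

     TRUNC_DIV_EXPR:  quo = -3, rem = -1   (round toward zero)
     FLOOR_DIV_EXPR:  quo = -4, rem =  1   (round toward -infinity)
     CEIL_DIV_EXPR:   quo = -3, rem = -1   (round toward +infinity)
     ROUND_DIV_EXPR:  quo = -4, rem =  1   (2*|rem| >= |den|, so the
                                            quotient is bumped away
                                            from zero)

   and in every case the remainder is recomputed at the end so that
   quo * den + rem == num.  */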

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
}

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
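
/* Illustrative sketch (not part of the original file): callers bracket a
   fold that must stay silent and decide afterwards whether the result
   was actually used (here `used_p' is a stand-in for the caller's own
   test, not a real variable in this file):

     fold_defer_overflow_warnings ();
     t = fold_binary (PLUS_EXPR, type, a, b);
     fold_undefer_overflow_warnings (t != NULL_TREE && used_p, stmt, 0);
*/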

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
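
/* Illustrative note (not part of the original file): the only signed
   value whose negation overflows is the type's minimum, a lone sign
   bit.  For a 32-bit int that is 0x80000000, and the final comparison
   against 1 << (prec - 1) returns false exactly for it and true for
   every other value.  */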

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
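
/* Illustrative sketch (not part of the original file): the BIT_NOT_EXPR
   case uses the two's complement identity -(~a) == a + 1, so negating
   ~A folds to the tree A + 1.  Likewise -((int) x >> 31) becomes
   (int) ((unsigned) x >> 31): the arithmetic shift yields only 0 or -1,
   whose negations 0 and 1 are exactly what the logical shift of the
   same sign bit produces.  */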

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
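
/* Illustrative sketch (not part of the original file): splitting
   IN = x - 4 with CODE == PLUS_EXPR and NEGATE_P == 0 returns the
   variable part x and leaves *CONP null; because the literal 4 was
   subtracted, it is stored in *MINUS_LITP rather than *LITP, so the
   caller can rebuild IN as x - 4 when reassociating the pieces.  */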

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
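
/* Illustrative sketch (not part of the original file): folding 7 / 2 in
   a signed int type takes the TRUNC_DIV_EXPR shortcut above (small
   nonnegative operands) and yields the shared INTEGER_CST 3.  Folding
   0x7fffffff + 1 instead reaches force_fit_type_double, which wraps the
   value to -2147483648 and returns an unshared node with TREE_OVERFLOW
   set.  */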

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
1914 if (TREE_CODE (arg1) == FIXED_CST)
1916 FIXED_VALUE_TYPE f1;
1917 FIXED_VALUE_TYPE f2;
1918 FIXED_VALUE_TYPE result;
1919 tree t, type;
1920 int sat_p;
1921 bool overflow_p;
1923 /* The following codes are handled by fixed_arithmetic. */
1924 switch (code)
1926 case PLUS_EXPR:
1927 case MINUS_EXPR:
1928 case MULT_EXPR:
1929 case TRUNC_DIV_EXPR:
1930 f2 = TREE_FIXED_CST (arg2);
1931 break;
1933 case LSHIFT_EXPR:
1934 case RSHIFT_EXPR:
1935 f2.data.high = TREE_INT_CST_HIGH (arg2);
1936 f2.data.low = TREE_INT_CST_LOW (arg2);
1937 f2.mode = SImode;
1938 break;
1940 default:
1941 return NULL_TREE;
1944 f1 = TREE_FIXED_CST (arg1);
1945 type = TREE_TYPE (arg1);
1946 sat_p = TYPE_SATURATING (type);
1947 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1948 t = build_fixed (type, result);
1949 /* Propagate overflow flags. */
1950 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1951 TREE_OVERFLOW (t) = 1;
1952 return t;
1955 if (TREE_CODE (arg1) == COMPLEX_CST)
1957 tree type = TREE_TYPE (arg1);
1958 tree r1 = TREE_REALPART (arg1);
1959 tree i1 = TREE_IMAGPART (arg1);
1960 tree r2 = TREE_REALPART (arg2);
1961 tree i2 = TREE_IMAGPART (arg2);
1962 tree real, imag;
1964 switch (code)
1966 case PLUS_EXPR:
1967 case MINUS_EXPR:
1968 real = const_binop (code, r1, r2, notrunc);
1969 imag = const_binop (code, i1, i2, notrunc);
1970 break;
1972 case MULT_EXPR:
1973 if (COMPLEX_FLOAT_TYPE_P (type))
1974 return do_mpc_arg2 (arg1, arg2, type,
1975 /* do_nonfinite= */ folding_initializer,
1976 mpc_mul);
1978 real = const_binop (MINUS_EXPR,
1979 const_binop (MULT_EXPR, r1, r2, notrunc),
1980 const_binop (MULT_EXPR, i1, i2, notrunc),
1981 notrunc);
1982 imag = const_binop (PLUS_EXPR,
1983 const_binop (MULT_EXPR, r1, i2, notrunc),
1984 const_binop (MULT_EXPR, i1, r2, notrunc),
1985 notrunc);
1986 break;
1988 case RDIV_EXPR:
1989 if (COMPLEX_FLOAT_TYPE_P (type))
1990 return do_mpc_arg2 (arg1, arg2, type,
1991 /* do_nonfinite= */ folding_initializer,
1992 mpc_div);
1993 /* Fallthru ... */
1994 case TRUNC_DIV_EXPR:
1995 case CEIL_DIV_EXPR:
1996 case FLOOR_DIV_EXPR:
1997 case ROUND_DIV_EXPR:
1998 if (flag_complex_method == 0)
2000 /* Keep this algorithm in sync with
2001 tree-complex.c:expand_complex_div_straight().
2003 Expand complex division to scalars, straightforward algorithm.
2004 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
2005 t = br*br + bi*bi */
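/* E.g. (1 + 2i) / (3 + 4i): t = 3*3 + 4*4 = 25,
   tr = (1*3 + 2*4)/25 = 11/25, ti = (2*3 - 1*4)/25 = 2/25. */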
2007 tree magsquared
2008 = const_binop (PLUS_EXPR,
2009 const_binop (MULT_EXPR, r2, r2, notrunc),
2010 const_binop (MULT_EXPR, i2, i2, notrunc),
2011 notrunc);
2012 tree t1
2013 = const_binop (PLUS_EXPR,
2014 const_binop (MULT_EXPR, r1, r2, notrunc),
2015 const_binop (MULT_EXPR, i1, i2, notrunc),
2016 notrunc);
2017 tree t2
2018 = const_binop (MINUS_EXPR,
2019 const_binop (MULT_EXPR, i1, r2, notrunc),
2020 const_binop (MULT_EXPR, r1, i2, notrunc),
2021 notrunc);
2023 real = const_binop (code, t1, magsquared, notrunc);
2024 imag = const_binop (code, t2, magsquared, notrunc);
2026 else
2028 /* Keep this algorithm in sync with
2029 tree-complex.c:expand_complex_div_wide().
2031 Expand complex division to scalars, modified algorithm to minimize
2032 overflow with wide input ranges. */
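/* Scaling by a ratio of at most 1 in magnitude keeps the intermediates
   representable: e.g. for b = 1e300 + 1e300i the straight method
   overflows computing br*br + bi*bi, while here ratio = 1 and
   div = 2e300 remain finite in double precision. */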
2033 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
2034 fold_abs_const (r2, TREE_TYPE (type)),
2035 fold_abs_const (i2, TREE_TYPE (type)));
2037 if (integer_nonzerop (compare))
2039 /* In the TRUE branch, we compute
2040 ratio = br/bi;
2041 div = (br * ratio) + bi;
2042 tr = (ar * ratio) + ai;
2043 ti = (ai * ratio) - ar;
2044 tr = tr / div;
2045 ti = ti / div; */
2046 tree ratio = const_binop (code, r2, i2, notrunc);
2047 tree div = const_binop (PLUS_EXPR, i2,
2048 const_binop (MULT_EXPR, r2, ratio,
2049 notrunc),
2050 notrunc);
2051 real = const_binop (MULT_EXPR, r1, ratio, notrunc);
2052 real = const_binop (PLUS_EXPR, real, i1, notrunc);
2053 real = const_binop (code, real, div, notrunc);
2055 imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
2056 imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
2057 imag = const_binop (code, imag, div, notrunc);
2059 else
2061 /* In the FALSE branch, we compute
2062 ratio = bi/br;
2063 div = (bi * ratio) + br;
2064 tr = (ai * ratio) + ar;
2065 ti = ai - (ar * ratio);
2066 tr = tr / div;
2067 ti = ti / div; */
2068 tree ratio = const_binop (code, i2, r2, notrunc);
2069 tree div = const_binop (PLUS_EXPR, r2,
2070 const_binop (MULT_EXPR, i2, ratio,
2071 notrunc),
2072 notrunc);
2074 real = const_binop (MULT_EXPR, i1, ratio, notrunc);
2075 real = const_binop (PLUS_EXPR, real, r1, notrunc);
2076 real = const_binop (code, real, div, notrunc);
2078 imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
2079 imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
2080 imag = const_binop (code, imag, div, notrunc);
2083 break;
2085 default:
2086 return NULL_TREE;
2089 if (real && imag)
2090 return build_complex (type, real, imag);
2093 if (TREE_CODE (arg1) == VECTOR_CST)
2095 tree type = TREE_TYPE (arg1);
2096 int count = TYPE_VECTOR_SUBPARTS (type), i;
2097 tree elements1, elements2, list = NULL_TREE;
2099 if (TREE_CODE (arg2) != VECTOR_CST)
2100 return NULL_TREE;
2102 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2103 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2105 for (i = 0; i < count; i++)
2107 tree elem1, elem2, elem;
2109 /* Trailing elements may be missing; treat them as 0. */
2110 if (!elements1)
2111 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2112 else
2114 elem1 = TREE_VALUE (elements1);
2115 elements1 = TREE_CHAIN (elements1);
2118 if (!elements2)
2119 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2120 else
2122 elem2 = TREE_VALUE (elements2);
2123 elements2 = TREE_CHAIN (elements2);
2126 elem = const_binop (code, elem1, elem2, notrunc);
2128 /* It is possible that const_binop cannot handle the given
2129 code and returns NULL_TREE. */
2130 if (elem == NULL_TREE)
2131 return NULL_TREE;
2133 list = tree_cons (NULL_TREE, elem, list);
2135 return build_vector (type, nreverse (list));
2137 return NULL_TREE;
2140 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2141 indicates which particular sizetype to create. */
2143 tree
2144 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2146 return build_int_cst (sizetype_tab[(int) kind], number);
2149 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2150 is a tree code. The type of the result is taken from the operands.
2151 Both must be equivalent integer types, a la int_binop_types_match_p.
2152 If the operands are constant, so is the result. */
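/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   immediately to the sizetype constant 12, and adding size_zero_node to
   a constant returns the other operand unchanged without building a new
   node. */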
2154 tree
2155 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2157 tree type = TREE_TYPE (arg0);
2159 if (arg0 == error_mark_node || arg1 == error_mark_node)
2160 return error_mark_node;
2162 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2163 TREE_TYPE (arg1)));
2165 /* Handle the special case of two integer constants faster. */
2166 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2168 /* And some specific cases even faster than that. */
2169 if (code == PLUS_EXPR)
2171 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2172 return arg1;
2173 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2174 return arg0;
2176 else if (code == MINUS_EXPR)
2178 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2179 return arg0;
2181 else if (code == MULT_EXPR)
2183 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2184 return arg1;
2187 /* Handle general case of two integer constants. */
2188 return int_const_binop (code, arg0, arg1, 0);
2191 return fold_build2_loc (loc, code, type, arg0, arg1);
2194 /* Given two values, either both of sizetype or both of bitsizetype,
2195 compute the difference between the two values. Return the value
2196 in signed type corresponding to the type of the operands. */
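/* E.g. for sizetype constants 2 and 5 the result is the ssizetype
   constant -3: since 5 > 2 we compute the unsigned difference 5 - 2
   first and negate after converting, so the subtraction never wraps. */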
2198 tree
2199 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2201 tree type = TREE_TYPE (arg0);
2202 tree ctype;
2204 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2205 TREE_TYPE (arg1)));
2207 /* If the type is already signed, just do the simple thing. */
2208 if (!TYPE_UNSIGNED (type))
2209 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2211 if (type == sizetype)
2212 ctype = ssizetype;
2213 else if (type == bitsizetype)
2214 ctype = sbitsizetype;
2215 else
2216 ctype = signed_type_for (type);
2218 /* If either operand is not a constant, do the conversions to the signed
2219 type and subtract. The hardware will do the right thing with any
2220 overflow in the subtraction. */
2221 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2222 return size_binop_loc (loc, MINUS_EXPR,
2223 fold_convert_loc (loc, ctype, arg0),
2224 fold_convert_loc (loc, ctype, arg1));
2226 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2227 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2228 overflow) and negate (which can't either). Special-case a result
2229 of zero while we're here. */
2230 if (tree_int_cst_equal (arg0, arg1))
2231 return build_int_cst (ctype, 0);
2232 else if (tree_int_cst_lt (arg1, arg0))
2233 return fold_convert_loc (loc, ctype,
2234 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2235 else
2236 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2237 fold_convert_loc (loc, ctype,
2238 size_binop_loc (loc,
2239 MINUS_EXPR,
2240 arg1, arg0)));
2243 /* A subroutine of fold_convert_const handling conversions of an
2244 INTEGER_CST to another integer type. */
2246 static tree
2247 fold_convert_const_int_from_int (tree type, const_tree arg1)
2249 tree t;
2251 /* Given an integer constant, make new constant with new type,
2252 appropriately sign-extended or truncated. */
2253 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2254 TREE_INT_CST_HIGH (arg1),
2255 /* Don't set the overflow when
2256 converting from a pointer, */
2257 !POINTER_TYPE_P (TREE_TYPE (arg1))
2258 /* or to a sizetype with same signedness
2259 and the precision is unchanged.
2260 ??? sizetype is always sign-extended,
2261 but its signedness depends on the
2262 frontend. Thus we see spurious overflows
2263 here if we do not check this. */
2264 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2265 == TYPE_PRECISION (type))
2266 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2267 == TYPE_UNSIGNED (type))
2268 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2269 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2270 || (TREE_CODE (type) == INTEGER_TYPE
2271 && TYPE_IS_SIZETYPE (type)))),
2272 (TREE_INT_CST_HIGH (arg1) < 0
2273 && (TYPE_UNSIGNED (type)
2274 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2275 | TREE_OVERFLOW (arg1));
2277 return t;
2280 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2281 to an integer type. */
2283 static tree
2284 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2286 int overflow = 0;
2287 tree t;
2289 /* The following code implements the floating point to integer
2290 conversion rules required by the Java Language Specification,
2291 that IEEE NaNs are mapped to zero and values that overflow
2292 the target precision saturate, i.e. values greater than
2293 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2294 are mapped to INT_MIN. These semantics are allowed by the
2295 C and C++ standards that simply state that the behavior of
2296 FP-to-integer conversion is unspecified upon overflow. */
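/* Concretely, for a 32-bit signed target type this folds
   (int) 1.0e10 to INT_MAX, (int) -1.0e10 to INT_MIN, and (int) NaN
   to 0, setting TREE_OVERFLOW in each case. */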
2298 HOST_WIDE_INT high, low;
2299 REAL_VALUE_TYPE r;
2300 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2302 switch (code)
2304 case FIX_TRUNC_EXPR:
2305 real_trunc (&r, VOIDmode, &x);
2306 break;
2308 default:
2309 gcc_unreachable ();
2312 /* If R is NaN, return zero and show we have an overflow. */
2313 if (REAL_VALUE_ISNAN (r))
2315 overflow = 1;
2316 high = 0;
2317 low = 0;
2320 /* See if R is less than the lower bound or greater than the
2321 upper bound. */
2323 if (! overflow)
2325 tree lt = TYPE_MIN_VALUE (type);
2326 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2327 if (REAL_VALUES_LESS (r, l))
2329 overflow = 1;
2330 high = TREE_INT_CST_HIGH (lt);
2331 low = TREE_INT_CST_LOW (lt);
2335 if (! overflow)
2337 tree ut = TYPE_MAX_VALUE (type);
2338 if (ut)
2340 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2341 if (REAL_VALUES_LESS (u, r))
2343 overflow = 1;
2344 high = TREE_INT_CST_HIGH (ut);
2345 low = TREE_INT_CST_LOW (ut);
2350 if (! overflow)
2351 REAL_VALUE_TO_INT (&low, &high, r);
2353 t = force_fit_type_double (type, low, high, -1,
2354 overflow | TREE_OVERFLOW (arg1));
2355 return t;
2358 /* A subroutine of fold_convert_const handling conversions of a
2359 FIXED_CST to an integer type. */
2361 static tree
2362 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2364 tree t;
2365 double_int temp, temp_trunc;
2366 unsigned int mode;
2368 /* Right shift FIXED_CST to temp by fbit. */
2369 temp = TREE_FIXED_CST (arg1).data;
2370 mode = TREE_FIXED_CST (arg1).mode;
2371 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2373 lshift_double (temp.low, temp.high,
2374 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2375 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2377 /* Left shift temp to temp_trunc by fbit. */
2378 lshift_double (temp.low, temp.high,
2379 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2380 &temp_trunc.low, &temp_trunc.high,
2381 SIGNED_FIXED_POINT_MODE_P (mode));
2383 else
2385 temp.low = 0;
2386 temp.high = 0;
2387 temp_trunc.low = 0;
2388 temp_trunc.high = 0;
2391 /* If FIXED_CST is negative, we need to round the value toward 0:
2392 if any fractional bits are non-zero, add 1 to temp. */
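/* E.g. a signed fixed-point value of -2.5 shifts arithmetically to -3
   (rounding toward -Inf); its fractional bits are non-zero, so adding 1
   yields -2, the value truncated toward zero. */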
2393 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2394 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2396 double_int one;
2397 one.low = 1;
2398 one.high = 0;
2399 temp = double_int_add (temp, one);
2402 /* Given a fixed-point constant, make new constant with new type,
2403 appropriately sign-extended or truncated. */
2404 t = force_fit_type_double (type, temp.low, temp.high, -1,
2405 (temp.high < 0
2406 && (TYPE_UNSIGNED (type)
2407 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2408 | TREE_OVERFLOW (arg1));
2410 return t;
2413 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2414 to another floating point type. */
2416 static tree
2417 fold_convert_const_real_from_real (tree type, const_tree arg1)
2419 REAL_VALUE_TYPE value;
2420 tree t;
2422 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2423 t = build_real (type, value);
2425 /* If converting an infinity or NAN to a representation that doesn't
2426 have one, set the overflow bit so that we can produce some kind of
2427 error message at the appropriate point if necessary. It's not the
2428 most user-friendly message, but it's better than nothing. */
2429 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2430 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2431 TREE_OVERFLOW (t) = 1;
2432 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2433 && !MODE_HAS_NANS (TYPE_MODE (type)))
2434 TREE_OVERFLOW (t) = 1;
2435 /* Regular overflow, conversion produced an infinity in a mode that
2436 can't represent them. */
2437 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2438 && REAL_VALUE_ISINF (value)
2439 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2440 TREE_OVERFLOW (t) = 1;
2441 else
2442 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2443 return t;
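/* E.g. converting a double 1.0e300 to float yields +Inf; TREE_OVERFLOW
   is set only if the target mode cannot represent infinities, since with
   IEEE infinities overflow to +Inf is a well-defined result. */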
2446 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2447 to a floating point type. */
2449 static tree
2450 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2452 REAL_VALUE_TYPE value;
2453 tree t;
2455 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2456 t = build_real (type, value);
2458 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2459 return t;
2462 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2463 to another fixed-point type. */
2465 static tree
2466 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2468 FIXED_VALUE_TYPE value;
2469 tree t;
2470 bool overflow_p;
2472 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2473 TYPE_SATURATING (type));
2474 t = build_fixed (type, value);
2476 /* Propagate overflow flags. */
2477 if (overflow_p | TREE_OVERFLOW (arg1))
2478 TREE_OVERFLOW (t) = 1;
2479 return t;
2482 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2483 to a fixed-point type. */
2485 static tree
2486 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2488 FIXED_VALUE_TYPE value;
2489 tree t;
2490 bool overflow_p;
2492 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2493 TREE_INT_CST (arg1),
2494 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2495 TYPE_SATURATING (type));
2496 t = build_fixed (type, value);
2498 /* Propagate overflow flags. */
2499 if (overflow_p | TREE_OVERFLOW (arg1))
2500 TREE_OVERFLOW (t) = 1;
2501 return t;
2504 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2505 to a fixed-point type. */
2507 static tree
2508 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2510 FIXED_VALUE_TYPE value;
2511 tree t;
2512 bool overflow_p;
2514 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2515 &TREE_REAL_CST (arg1),
2516 TYPE_SATURATING (type));
2517 t = build_fixed (type, value);
2519 /* Propagate overflow flags. */
2520 if (overflow_p | TREE_OVERFLOW (arg1))
2521 TREE_OVERFLOW (t) = 1;
2522 return t;
2525 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2526 type TYPE. If no simplification can be done return NULL_TREE. */
2528 static tree
2529 fold_convert_const (enum tree_code code, tree type, tree arg1)
2531 if (TREE_TYPE (arg1) == type)
2532 return arg1;
2534 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2535 || TREE_CODE (type) == OFFSET_TYPE)
2537 if (TREE_CODE (arg1) == INTEGER_CST)
2538 return fold_convert_const_int_from_int (type, arg1);
2539 else if (TREE_CODE (arg1) == REAL_CST)
2540 return fold_convert_const_int_from_real (code, type, arg1);
2541 else if (TREE_CODE (arg1) == FIXED_CST)
2542 return fold_convert_const_int_from_fixed (type, arg1);
2544 else if (TREE_CODE (type) == REAL_TYPE)
2546 if (TREE_CODE (arg1) == INTEGER_CST)
2547 return build_real_from_int_cst (type, arg1);
2548 else if (TREE_CODE (arg1) == REAL_CST)
2549 return fold_convert_const_real_from_real (type, arg1);
2550 else if (TREE_CODE (arg1) == FIXED_CST)
2551 return fold_convert_const_real_from_fixed (type, arg1);
2553 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2555 if (TREE_CODE (arg1) == FIXED_CST)
2556 return fold_convert_const_fixed_from_fixed (type, arg1);
2557 else if (TREE_CODE (arg1) == INTEGER_CST)
2558 return fold_convert_const_fixed_from_int (type, arg1);
2559 else if (TREE_CODE (arg1) == REAL_CST)
2560 return fold_convert_const_fixed_from_real (type, arg1);
2562 return NULL_TREE;
2565 /* Construct a vector of zero elements of vector type TYPE. */
2567 static tree
2568 build_zero_vector (tree type)
2570 tree elem, list;
2571 int i, units;
2573 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2574 units = TYPE_VECTOR_SUBPARTS (type);
2576 list = NULL_TREE;
2577 for (i = 0; i < units; i++)
2578 list = tree_cons (NULL_TREE, elem, list);
2579 return build_vector (type, list);
2582 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2584 bool
2585 fold_convertible_p (const_tree type, const_tree arg)
2587 tree orig = TREE_TYPE (arg);
2589 if (type == orig)
2590 return true;
2592 if (TREE_CODE (arg) == ERROR_MARK
2593 || TREE_CODE (type) == ERROR_MARK
2594 || TREE_CODE (orig) == ERROR_MARK)
2595 return false;
2597 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2598 return true;
2600 switch (TREE_CODE (type))
2602 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2604 case OFFSET_TYPE:
2605 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2606 || TREE_CODE (orig) == OFFSET_TYPE)
2607 return true;
2608 return (TREE_CODE (orig) == VECTOR_TYPE
2609 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2611 case REAL_TYPE:
2612 case FIXED_POINT_TYPE:
2613 case COMPLEX_TYPE:
2614 case VECTOR_TYPE:
2615 case VOID_TYPE:
2616 return TREE_CODE (type) == TREE_CODE (orig);
2618 default:
2619 return false;
2623 /* Convert expression ARG to type TYPE. Used by the middle-end for
2624 simple conversions in preference to calling the front-end's convert. */
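/* For instance, fold_convert_loc (loc, double_type_node,
   integer_one_node) folds directly to the REAL_CST 1.0 rather than
   leaving a FLOAT_EXPR for later passes. */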
2626 tree
2627 fold_convert_loc (location_t loc, tree type, tree arg)
2629 tree orig = TREE_TYPE (arg);
2630 tree tem;
2632 if (type == orig)
2633 return arg;
2635 if (TREE_CODE (arg) == ERROR_MARK
2636 || TREE_CODE (type) == ERROR_MARK
2637 || TREE_CODE (orig) == ERROR_MARK)
2638 return error_mark_node;
2640 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2641 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2643 switch (TREE_CODE (type))
2645 case POINTER_TYPE:
2646 case REFERENCE_TYPE:
2647 /* Handle conversions between pointers to different address spaces. */
2648 if (POINTER_TYPE_P (orig)
2649 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2650 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2651 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2652 /* fall through */
2654 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2655 case OFFSET_TYPE:
2656 if (TREE_CODE (arg) == INTEGER_CST)
2658 tem = fold_convert_const (NOP_EXPR, type, arg);
2659 if (tem != NULL_TREE)
2660 return tem;
2662 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2663 || TREE_CODE (orig) == OFFSET_TYPE)
2664 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2665 if (TREE_CODE (orig) == COMPLEX_TYPE)
2666 return fold_convert_loc (loc, type,
2667 fold_build1_loc (loc, REALPART_EXPR,
2668 TREE_TYPE (orig), arg));
2669 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2670 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2671 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2673 case REAL_TYPE:
2674 if (TREE_CODE (arg) == INTEGER_CST)
2676 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2677 if (tem != NULL_TREE)
2678 return tem;
2680 else if (TREE_CODE (arg) == REAL_CST)
2682 tem = fold_convert_const (NOP_EXPR, type, arg);
2683 if (tem != NULL_TREE)
2684 return tem;
2686 else if (TREE_CODE (arg) == FIXED_CST)
2688 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2689 if (tem != NULL_TREE)
2690 return tem;
2693 switch (TREE_CODE (orig))
2695 case INTEGER_TYPE:
2696 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2697 case POINTER_TYPE: case REFERENCE_TYPE:
2698 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2700 case REAL_TYPE:
2701 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2703 case FIXED_POINT_TYPE:
2704 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2706 case COMPLEX_TYPE:
2707 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2708 return fold_convert_loc (loc, type, tem);
2710 default:
2711 gcc_unreachable ();
2714 case FIXED_POINT_TYPE:
2715 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2716 || TREE_CODE (arg) == REAL_CST)
2718 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2719 if (tem != NULL_TREE)
2720 goto fold_convert_exit;
2723 switch (TREE_CODE (orig))
2725 case FIXED_POINT_TYPE:
2726 case INTEGER_TYPE:
2727 case ENUMERAL_TYPE:
2728 case BOOLEAN_TYPE:
2729 case REAL_TYPE:
2730 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2732 case COMPLEX_TYPE:
2733 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2734 return fold_convert_loc (loc, type, tem);
2736 default:
2737 gcc_unreachable ();
2740 case COMPLEX_TYPE:
2741 switch (TREE_CODE (orig))
2743 case INTEGER_TYPE:
2744 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2745 case POINTER_TYPE: case REFERENCE_TYPE:
2746 case REAL_TYPE:
2747 case FIXED_POINT_TYPE:
2748 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2749 fold_convert_loc (loc, TREE_TYPE (type), arg),
2750 fold_convert_loc (loc, TREE_TYPE (type),
2751 integer_zero_node));
2752 case COMPLEX_TYPE:
2754 tree rpart, ipart;
2756 if (TREE_CODE (arg) == COMPLEX_EXPR)
2758 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2759 TREE_OPERAND (arg, 0));
2760 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2761 TREE_OPERAND (arg, 1));
2762 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2765 arg = save_expr (arg);
2766 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2767 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2768 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2769 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2770 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2773 default:
2774 gcc_unreachable ();
2777 case VECTOR_TYPE:
2778 if (integer_zerop (arg))
2779 return build_zero_vector (type);
2780 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2781 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2782 || TREE_CODE (orig) == VECTOR_TYPE);
2783 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2785 case VOID_TYPE:
2786 tem = fold_ignored_result (arg);
2787 if (TREE_CODE (tem) == MODIFY_EXPR)
2788 goto fold_convert_exit;
2789 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2791 default:
2792 gcc_unreachable ();
2794 fold_convert_exit:
2795 protected_set_expr_location (tem, loc);
2796 return tem;
2799 /* Return false if expr can be assumed not to be an lvalue, true
2800 otherwise. */
2802 static bool
2803 maybe_lvalue_p (const_tree x)
2805 /* We only need to wrap lvalue tree codes. */
2806 switch (TREE_CODE (x))
2808 case VAR_DECL:
2809 case PARM_DECL:
2810 case RESULT_DECL:
2811 case LABEL_DECL:
2812 case FUNCTION_DECL:
2813 case SSA_NAME:
2815 case COMPONENT_REF:
2816 case INDIRECT_REF:
2817 case ALIGN_INDIRECT_REF:
2818 case MISALIGNED_INDIRECT_REF:
2819 case ARRAY_REF:
2820 case ARRAY_RANGE_REF:
2821 case BIT_FIELD_REF:
2822 case OBJ_TYPE_REF:
2824 case REALPART_EXPR:
2825 case IMAGPART_EXPR:
2826 case PREINCREMENT_EXPR:
2827 case PREDECREMENT_EXPR:
2828 case SAVE_EXPR:
2829 case TRY_CATCH_EXPR:
2830 case WITH_CLEANUP_EXPR:
2831 case COMPOUND_EXPR:
2832 case MODIFY_EXPR:
2833 case TARGET_EXPR:
2834 case COND_EXPR:
2835 case BIND_EXPR:
2836 break;
2838 default:
2839 /* Assume the worst for front-end tree codes. */
2840 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2841 break;
2842 return false;
2845 return true;
2848 /* Return an expr equal to X but certainly not valid as an lvalue. */
2850 tree
2851 non_lvalue_loc (location_t loc, tree x)
2853 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2854 us. */
2855 if (in_gimple_form)
2856 return x;
2858 if (! maybe_lvalue_p (x))
2859 return x;
2860 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2861 SET_EXPR_LOCATION (x, loc);
2862 return x;
2865 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2866 Zero means allow extended lvalues. */
2868 int pedantic_lvalues;
2870 /* When pedantic, return an expr equal to X but certainly not valid as a
2871 pedantic lvalue. Otherwise, return X. */
2873 static tree
2874 pedantic_non_lvalue_loc (location_t loc, tree x)
2876 if (pedantic_lvalues)
2877 return non_lvalue_loc (loc, x);
2878 protected_set_expr_location (x, loc);
2879 return x;
2882 /* Given a tree comparison code, return the code that is the logical inverse
2883 of the given code. It is not safe to do this for floating-point
2884 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2885 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
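/* For example, the inverse of a < b is a >= b only when NaNs cannot
   occur; with NaNs honored it is UNGE_EXPR, and if flag_trapping_math is
   also set we return ERROR_MARK, since the unordered variant would not
   raise the same exceptions. */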
2887 enum tree_code
2888 invert_tree_comparison (enum tree_code code, bool honor_nans)
2890 if (honor_nans && flag_trapping_math)
2891 return ERROR_MARK;
2893 switch (code)
2895 case EQ_EXPR:
2896 return NE_EXPR;
2897 case NE_EXPR:
2898 return EQ_EXPR;
2899 case GT_EXPR:
2900 return honor_nans ? UNLE_EXPR : LE_EXPR;
2901 case GE_EXPR:
2902 return honor_nans ? UNLT_EXPR : LT_EXPR;
2903 case LT_EXPR:
2904 return honor_nans ? UNGE_EXPR : GE_EXPR;
2905 case LE_EXPR:
2906 return honor_nans ? UNGT_EXPR : GT_EXPR;
2907 case LTGT_EXPR:
2908 return UNEQ_EXPR;
2909 case UNEQ_EXPR:
2910 return LTGT_EXPR;
2911 case UNGT_EXPR:
2912 return LE_EXPR;
2913 case UNGE_EXPR:
2914 return LT_EXPR;
2915 case UNLT_EXPR:
2916 return GE_EXPR;
2917 case UNLE_EXPR:
2918 return GT_EXPR;
2919 case ORDERED_EXPR:
2920 return UNORDERED_EXPR;
2921 case UNORDERED_EXPR:
2922 return ORDERED_EXPR;
2923 default:
2924 gcc_unreachable ();
2928 /* Similar, but return the comparison that results if the operands are
2929 swapped. This is safe for floating-point. */
2931 enum tree_code
2932 swap_tree_comparison (enum tree_code code)
2934 switch (code)
2936 case EQ_EXPR:
2937 case NE_EXPR:
2938 case ORDERED_EXPR:
2939 case UNORDERED_EXPR:
2940 case LTGT_EXPR:
2941 case UNEQ_EXPR:
2942 return code;
2943 case GT_EXPR:
2944 return LT_EXPR;
2945 case GE_EXPR:
2946 return LE_EXPR;
2947 case LT_EXPR:
2948 return GT_EXPR;
2949 case LE_EXPR:
2950 return GE_EXPR;
2951 case UNGT_EXPR:
2952 return UNLT_EXPR;
2953 case UNGE_EXPR:
2954 return UNLE_EXPR;
2955 case UNLT_EXPR:
2956 return UNGT_EXPR;
2957 case UNLE_EXPR:
2958 return UNGE_EXPR;
2959 default:
2960 gcc_unreachable ();
2965 /* Convert a comparison tree code from an enum tree_code representation
2966 into a compcode bit-based encoding. This function is the inverse of
2967 compcode_to_comparison. */
2969 static enum comparison_code
2970 comparison_to_compcode (enum tree_code code)
2972 switch (code)
2974 case LT_EXPR:
2975 return COMPCODE_LT;
2976 case EQ_EXPR:
2977 return COMPCODE_EQ;
2978 case LE_EXPR:
2979 return COMPCODE_LE;
2980 case GT_EXPR:
2981 return COMPCODE_GT;
2982 case NE_EXPR:
2983 return COMPCODE_NE;
2984 case GE_EXPR:
2985 return COMPCODE_GE;
2986 case ORDERED_EXPR:
2987 return COMPCODE_ORD;
2988 case UNORDERED_EXPR:
2989 return COMPCODE_UNORD;
2990 case UNLT_EXPR:
2991 return COMPCODE_UNLT;
2992 case UNEQ_EXPR:
2993 return COMPCODE_UNEQ;
2994 case UNLE_EXPR:
2995 return COMPCODE_UNLE;
2996 case UNGT_EXPR:
2997 return COMPCODE_UNGT;
2998 case LTGT_EXPR:
2999 return COMPCODE_LTGT;
3000 case UNGE_EXPR:
3001 return COMPCODE_UNGE;
3002 default:
3003 gcc_unreachable ();
3007 /* Convert a compcode bit-based encoding of a comparison operator back
3008 to GCC's enum tree_code representation. This function is the
3009 inverse of comparison_to_compcode. */
3011 static enum tree_code
3012 compcode_to_comparison (enum comparison_code code)
3014 switch (code)
3016 case COMPCODE_LT:
3017 return LT_EXPR;
3018 case COMPCODE_EQ:
3019 return EQ_EXPR;
3020 case COMPCODE_LE:
3021 return LE_EXPR;
3022 case COMPCODE_GT:
3023 return GT_EXPR;
3024 case COMPCODE_NE:
3025 return NE_EXPR;
3026 case COMPCODE_GE:
3027 return GE_EXPR;
3028 case COMPCODE_ORD:
3029 return ORDERED_EXPR;
3030 case COMPCODE_UNORD:
3031 return UNORDERED_EXPR;
3032 case COMPCODE_UNLT:
3033 return UNLT_EXPR;
3034 case COMPCODE_UNEQ:
3035 return UNEQ_EXPR;
3036 case COMPCODE_UNLE:
3037 return UNLE_EXPR;
3038 case COMPCODE_UNGT:
3039 return UNGT_EXPR;
3040 case COMPCODE_LTGT:
3041 return LTGT_EXPR;
3042 case COMPCODE_UNGE:
3043 return UNGE_EXPR;
3044 default:
3045 gcc_unreachable ();
3049 /* Return a tree for the comparison which is the combination of
3050 doing the AND or OR (depending on CODE) of the two operations LCODE
3051 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3052 the possibility of trapping if the mode has NaNs, and return NULL_TREE
3053 if this makes the transformation invalid. */
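/* For example, (a < b) || (a == b) combines as
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and (when NaNs need not be
   honored) folds to a <= b, while (a < b) && (a > b) gives
   COMPCODE_FALSE and folds to a constant false. */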
3055 tree
3056 combine_comparisons (location_t loc,
3057 enum tree_code code, enum tree_code lcode,
3058 enum tree_code rcode, tree truth_type,
3059 tree ll_arg, tree lr_arg)
3061 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
3062 enum comparison_code lcompcode = comparison_to_compcode (lcode);
3063 enum comparison_code rcompcode = comparison_to_compcode (rcode);
3064 int compcode;
3066 switch (code)
3068 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3069 compcode = lcompcode & rcompcode;
3070 break;
3072 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3073 compcode = lcompcode | rcompcode;
3074 break;
3076 default:
3077 return NULL_TREE;
3080 if (!honor_nans)
3082 /* Eliminate unordered comparisons, as well as LTGT and ORD
3083 which are not used unless the mode has NaNs. */
3084 compcode &= ~COMPCODE_UNORD;
3085 if (compcode == COMPCODE_LTGT)
3086 compcode = COMPCODE_NE;
3087 else if (compcode == COMPCODE_ORD)
3088 compcode = COMPCODE_TRUE;
3090 else if (flag_trapping_math)
3092 /* Check that the original operation and the optimized ones will trap
3093 under the same condition. */
3094 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3095 && (lcompcode != COMPCODE_EQ)
3096 && (lcompcode != COMPCODE_ORD);
3097 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3098 && (rcompcode != COMPCODE_EQ)
3099 && (rcompcode != COMPCODE_ORD);
3100 bool trap = (compcode & COMPCODE_UNORD) == 0
3101 && (compcode != COMPCODE_EQ)
3102 && (compcode != COMPCODE_ORD);
3104 /* In a short-circuited boolean expression the LHS might be
3105 such that the RHS, if evaluated, will never trap. For
3106 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3107 if neither x nor y is NaN. (This is a mixed blessing: for
3108 example, the expression above will never trap, hence
3109 optimizing it to x < y would be invalid). */
3110 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3111 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3112 rtrap = false;
3114 /* If the comparison was short-circuited, and only the RHS
3115 trapped, we may now generate a spurious trap. */
3116 if (rtrap && !ltrap
3117 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3118 return NULL_TREE;
3120 /* If we changed the conditions that cause a trap, we lose. */
3121 if ((ltrap || rtrap) != trap)
3122 return NULL_TREE;
3125 if (compcode == COMPCODE_TRUE)
3126 return constant_boolean_node (true, truth_type);
3127 else if (compcode == COMPCODE_FALSE)
3128 return constant_boolean_node (false, truth_type);
3129 else
3131 enum tree_code tcode;
3133 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3134 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3138 /* Return nonzero if two operands (typically of the same tree node)
3139 are necessarily equal. If either argument has side-effects this
3140 function returns zero. FLAGS modifies behavior as follows:
3142 If OEP_ONLY_CONST is set, only return nonzero for constants.
3143 This function tests whether the operands are indistinguishable;
3144 it does not test whether they are equal using C's == operation.
3145 The distinction is important for IEEE floating point, because
3146 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3147 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3149 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3150 even though it may hold multiple values during a function.
3151 This is because a GCC tree node guarantees that nothing else is
3152 executed between the evaluation of its "operands" (which may often
3153 be evaluated in arbitrary order). Hence if the operands themselves
3154 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3155 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3156 unset means assuming isochronic (or instantaneous) tree equivalence.
3157 Unless comparing arbitrary expression trees, such as from different
3158 statements, this flag can usually be left unset.
3160 If OEP_PURE_SAME is set, then pure functions with identical arguments
3161 are considered the same. It is used when the caller has other ways
3162 to ensure that global memory is unchanged in between. */
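/* For instance, REAL_CSTs 0.0 and -0.0 do not compare equal here when
   signed zeros are honored, even though 0.0 == -0.0 is true, and a call
   f (x) only matches another f (x) when f is const (or pure, with
   OEP_PURE_SAME). */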
3164 int
3165 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3167 /* If either is ERROR_MARK, they aren't equal. */
3168 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3169 || TREE_TYPE (arg0) == error_mark_node
3170 || TREE_TYPE (arg1) == error_mark_node)
3171 return 0;
3173 /* Similarly, if either does not have a type (like a released SSA name),
3174 they aren't equal. */
3175 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3176 return 0;
3178 /* Check equality of integer constants before bailing out due to
3179 precision differences. */
3180 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3181 return tree_int_cst_equal (arg0, arg1);
3183 /* If the two types differ in signedness, then we can't consider
3184 them equal. We must check this before the STRIP_NOPS calls
3185 because they may change the signedness of the arguments. As pointers
3186 strictly don't have a signedness, require either two pointers or
3187 two non-pointers as well. */
3188 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3189 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3190 return 0;
3192 /* We cannot consider pointers to different address spaces equal. */
3193 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
3194 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3195 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3196 return 0;
3198 /* If the two types differ in precision, then it is not safe
3199 to strip NOPs. */
3200 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3201 return 0;
3203 STRIP_NOPS (arg0);
3204 STRIP_NOPS (arg1);
3206 /* In case both args are comparisons but with different comparison
3207 code, try to swap the comparison operands of one arg to produce
3208 a match and compare that variant. */
3209 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3210 && COMPARISON_CLASS_P (arg0)
3211 && COMPARISON_CLASS_P (arg1))
3213 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3215 if (TREE_CODE (arg0) == swap_code)
3216 return operand_equal_p (TREE_OPERAND (arg0, 0),
3217 TREE_OPERAND (arg1, 1), flags)
3218 && operand_equal_p (TREE_OPERAND (arg0, 1),
3219 TREE_OPERAND (arg1, 0), flags);
3222 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3223 /* This is needed for conversions and for COMPONENT_REF.
3224 Might as well play it safe and always test this. */
3225 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3226 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3227 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3228 return 0;
3230 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3231 We don't care about side effects in that case because the SAVE_EXPR
3232 takes care of that for us. In all other cases, two expressions are
3233 equal if they have no side effects. If we have two identical
3234 expressions with side effects that should be treated the same due
3235 to the only side effects being identical SAVE_EXPR's, that will
3236 be detected in the recursive calls below. */
3237 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3238 && (TREE_CODE (arg0) == SAVE_EXPR
3239 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3240 return 1;
3242 /* Next handle constant cases, those for which we can return 1 even
3243 if ONLY_CONST is set. */
3244 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3245 switch (TREE_CODE (arg0))
3247 case INTEGER_CST:
3248 return tree_int_cst_equal (arg0, arg1);
3250 case FIXED_CST:
3251 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3252 TREE_FIXED_CST (arg1));
3254 case REAL_CST:
3255 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3256 TREE_REAL_CST (arg1)))
3257 return 1;
3260 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3262 /* If we do not distinguish between signed and unsigned zero,
3263 consider them equal. */
3264 if (real_zerop (arg0) && real_zerop (arg1))
3265 return 1;
3267 return 0;
3269 case VECTOR_CST:
3271 tree v1, v2;
3273 v1 = TREE_VECTOR_CST_ELTS (arg0);
3274 v2 = TREE_VECTOR_CST_ELTS (arg1);
3275 while (v1 && v2)
3277 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3278 flags))
3279 return 0;
3280 v1 = TREE_CHAIN (v1);
3281 v2 = TREE_CHAIN (v2);
3284 return v1 == v2;
3287 case COMPLEX_CST:
3288 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3289 flags)
3290 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3291 flags));
3293 case STRING_CST:
3294 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3295 && ! memcmp (TREE_STRING_POINTER (arg0),
3296 TREE_STRING_POINTER (arg1),
3297 TREE_STRING_LENGTH (arg0)));
3299 case ADDR_EXPR:
3300 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3301 0);
3302 default:
3303 break;
3306 if (flags & OEP_ONLY_CONST)
3307 return 0;
3309 /* Define macros to test an operand from arg0 and arg1 for equality and a
3310 variant that allows null and views null as being different from any
3311 non-null value. In the latter case, if either is null, then both
3312 must be; otherwise, do the normal comparison. */
3313 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3314 TREE_OPERAND (arg1, N), flags)
3316 #define OP_SAME_WITH_NULL(N) \
3317 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3318 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3320 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3322 case tcc_unary:
3323 /* Two conversions are equal only if signedness and modes match. */
3324 switch (TREE_CODE (arg0))
3326 CASE_CONVERT:
3327 case FIX_TRUNC_EXPR:
3328 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3329 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3330 return 0;
3331 break;
3332 default:
3333 break;
3336 return OP_SAME (0);
3339 case tcc_comparison:
3340 case tcc_binary:
3341 if (OP_SAME (0) && OP_SAME (1))
3342 return 1;
3344 /* For commutative ops, allow the other order. */
3345 return (commutative_tree_code (TREE_CODE (arg0))
3346 && operand_equal_p (TREE_OPERAND (arg0, 0),
3347 TREE_OPERAND (arg1, 1), flags)
3348 && operand_equal_p (TREE_OPERAND (arg0, 1),
3349 TREE_OPERAND (arg1, 0), flags));
3351 case tcc_reference:
3352 /* If either of the pointer (or reference) expressions we are
3353 dereferencing contain a side effect, these cannot be equal. */
3354 if (TREE_SIDE_EFFECTS (arg0)
3355 || TREE_SIDE_EFFECTS (arg1))
3356 return 0;
3358 switch (TREE_CODE (arg0))
3360 case INDIRECT_REF:
3361 case ALIGN_INDIRECT_REF:
3362 case MISALIGNED_INDIRECT_REF:
3363 case REALPART_EXPR:
3364 case IMAGPART_EXPR:
3365 return OP_SAME (0);
3367 case ARRAY_REF:
3368 case ARRAY_RANGE_REF:
3369 /* Operands 2 and 3 may be null.
3370 Compare the array index by value first if it is constant, as the
3371 indices may have different types but the same value here. */
3372 return (OP_SAME (0)
3373 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3374 TREE_OPERAND (arg1, 1))
3375 || OP_SAME (1))
3376 && OP_SAME_WITH_NULL (2)
3377 && OP_SAME_WITH_NULL (3));
3379 case COMPONENT_REF:
3380 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3381 may be NULL when we're called to compare MEM_EXPRs. */
3382 return OP_SAME_WITH_NULL (0)
3383 && OP_SAME (1)
3384 && OP_SAME_WITH_NULL (2);
3386 case BIT_FIELD_REF:
3387 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3389 default:
3390 return 0;
3393 case tcc_expression:
3394 switch (TREE_CODE (arg0))
3396 case ADDR_EXPR:
3397 case TRUTH_NOT_EXPR:
3398 return OP_SAME (0);
3400 case TRUTH_ANDIF_EXPR:
3401 case TRUTH_ORIF_EXPR:
3402 return OP_SAME (0) && OP_SAME (1);
3404 case TRUTH_AND_EXPR:
3405 case TRUTH_OR_EXPR:
3406 case TRUTH_XOR_EXPR:
3407 if (OP_SAME (0) && OP_SAME (1))
3408 return 1;
3410 /* Otherwise take into account this is a commutative operation. */
3411 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3412 TREE_OPERAND (arg1, 1), flags)
3413 && operand_equal_p (TREE_OPERAND (arg0, 1),
3414 TREE_OPERAND (arg1, 0), flags));
3416 case COND_EXPR:
3417 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3419 default:
3420 return 0;
3423 case tcc_vl_exp:
3424 switch (TREE_CODE (arg0))
3426 case CALL_EXPR:
3427 /* If the CALL_EXPRs call different functions, then they
3428 clearly cannot be equal. */
3429 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3430 flags))
3431 return 0;
3434 unsigned int cef = call_expr_flags (arg0);
3435 if (flags & OEP_PURE_SAME)
3436 cef &= ECF_CONST | ECF_PURE;
3437 else
3438 cef &= ECF_CONST;
3439 if (!cef)
3440 return 0;
3443 /* Now see if all the arguments are the same. */
3445 const_call_expr_arg_iterator iter0, iter1;
3446 const_tree a0, a1;
3447 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3448 a1 = first_const_call_expr_arg (arg1, &iter1);
3449 a0 && a1;
3450 a0 = next_const_call_expr_arg (&iter0),
3451 a1 = next_const_call_expr_arg (&iter1))
3452 if (! operand_equal_p (a0, a1, flags))
3453 return 0;
3455 /* If we get here and both argument lists are exhausted
3456 then the CALL_EXPRs are equal. */
3457 return ! (a0 || a1);
3459 default:
3460 return 0;
3463 case tcc_declaration:
3464 /* Consider __builtin_sqrt equal to sqrt. */
3465 return (TREE_CODE (arg0) == FUNCTION_DECL
3466 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3467 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3468 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3470 default:
3471 return 0;
3474 #undef OP_SAME
3475 #undef OP_SAME_WITH_NULL
3478 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3479 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3481 When in doubt, return 0. */
3483 static int
3484 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3486 int unsignedp1, unsignedpo;
3487 tree primarg0, primarg1, primother;
3488 unsigned int correct_width;
3490 if (operand_equal_p (arg0, arg1, 0))
3491 return 1;
3493 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3494 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3495 return 0;
3497 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3498 and see if the inner values are the same. This removes any
3499 signedness comparison, which doesn't matter here. */
3500 primarg0 = arg0, primarg1 = arg1;
3501 STRIP_NOPS (primarg0);
3502 STRIP_NOPS (primarg1);
3503 if (operand_equal_p (primarg0, primarg1, 0))
3504 return 1;
3506 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3507 actual comparison operand, ARG0.
3509 First throw away any conversions to wider types
3510 already present in the operands. */
3512 primarg1 = get_narrower (arg1, &unsignedp1);
3513 primother = get_narrower (other, &unsignedpo);
3515 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3516 if (unsignedp1 == unsignedpo
3517 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3518 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3520 tree type = TREE_TYPE (arg0);
3522 /* Make sure the shorter operand is extended the right way
3523 to match the longer operand. */
3524 primarg1 = fold_convert (signed_or_unsigned_type_for
3525 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3527 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3528 return 1;
3531 return 0;
3534 /* See if ARG is an expression that is either a comparison or is performing
3535 arithmetic on comparisons. The comparisons must only be comparing
3536 two different values, which will be stored in *CVAL1 and *CVAL2; if
3537 they are nonzero it means that some operands have already been found.
3538 No variables may be used anywhere else in the expression except in the
3539 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3540 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3542 If this is true, return 1. Otherwise, return zero. */
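/* E.g. for ARG = (x < y) | (x == y), a BIT_IOR_EXPR of two comparisons,
   this returns 1 with *CVAL1 = x and *CVAL2 = y, whereas
   (x < y) | (y < z) fails because three distinct values appear in the
   comparisons. */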
3544 static int
3545 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3547 enum tree_code code = TREE_CODE (arg);
3548 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3550 /* We can handle some of the tcc_expression cases here. */
3551 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3552 tclass = tcc_unary;
3553 else if (tclass == tcc_expression
3554 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3555 || code == COMPOUND_EXPR))
3556 tclass = tcc_binary;
3558 else if (tclass == tcc_expression && code == SAVE_EXPR
3559 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3561 /* If we've already found a CVAL1 or CVAL2, this expression is
3562 too complex to handle. */
3563 if (*cval1 || *cval2)
3564 return 0;
3566 tclass = tcc_unary;
3567 *save_p = 1;
3570 switch (tclass)
3572 case tcc_unary:
3573 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3575 case tcc_binary:
3576 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3577 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3578 cval1, cval2, save_p));
3580 case tcc_constant:
3581 return 1;
3583 case tcc_expression:
3584 if (code == COND_EXPR)
3585 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3586 cval1, cval2, save_p)
3587 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3588 cval1, cval2, save_p)
3589 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3590 cval1, cval2, save_p));
3591 return 0;
3593 case tcc_comparison:
3594 /* First see if we can handle the first operand, then the second. For
3595 the second operand, we know *CVAL1 can't be zero. It must be that
3596 one side of the comparison is each of the values; test for the
3597 case where this isn't true by failing if the two operands
3598 are the same. */
3600 if (operand_equal_p (TREE_OPERAND (arg, 0),
3601 TREE_OPERAND (arg, 1), 0))
3602 return 0;
3604 if (*cval1 == 0)
3605 *cval1 = TREE_OPERAND (arg, 0);
3606 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3608 else if (*cval2 == 0)
3609 *cval2 = TREE_OPERAND (arg, 0);
3610 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3612 else
3613 return 0;
3615 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3617 else if (*cval2 == 0)
3618 *cval2 = TREE_OPERAND (arg, 1);
3619 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3621 else
3622 return 0;
3624 return 1;
3626 default:
3627 return 0;
3631 /* ARG is a tree that is known to contain just arithmetic operations and
3632 comparisons. Evaluate the operations in the tree substituting NEW0 for
3633 any occurrence of OLD0 as an operand of a comparison and likewise for
3634 NEW1 and OLD1. */
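/* Continuing the example above: eval_subst on (x < y) | (x == y) with
   OLD0 = x, NEW0 = 0, OLD1 = y, NEW1 = 1 rebuilds the tree as
   (0 < 1) | (0 == 1), which the fold_build calls then reduce further. */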
3636 static tree
3637 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3638 tree old1, tree new1)
3640 tree type = TREE_TYPE (arg);
3641 enum tree_code code = TREE_CODE (arg);
3642 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3644 /* We can handle some of the tcc_expression cases here. */
3645 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3646 tclass = tcc_unary;
3647 else if (tclass == tcc_expression
3648 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3649 tclass = tcc_binary;
3651 switch (tclass)
3653 case tcc_unary:
3654 return fold_build1_loc (loc, code, type,
3655 eval_subst (loc, TREE_OPERAND (arg, 0),
3656 old0, new0, old1, new1));
3658 case tcc_binary:
3659 return fold_build2_loc (loc, code, type,
3660 eval_subst (loc, TREE_OPERAND (arg, 0),
3661 old0, new0, old1, new1),
3662 eval_subst (loc, TREE_OPERAND (arg, 1),
3663 old0, new0, old1, new1));
3665 case tcc_expression:
3666 switch (code)
3668 case SAVE_EXPR:
3669 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3670 old1, new1);
3672 case COMPOUND_EXPR:
3673 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3674 old1, new1);
3676 case COND_EXPR:
3677 return fold_build3_loc (loc, code, type,
3678 eval_subst (loc, TREE_OPERAND (arg, 0),
3679 old0, new0, old1, new1),
3680 eval_subst (loc, TREE_OPERAND (arg, 1),
3681 old0, new0, old1, new1),
3682 eval_subst (loc, TREE_OPERAND (arg, 2),
3683 old0, new0, old1, new1));
3684 default:
3685 break;
3687 /* Fall through - ??? */
3689 case tcc_comparison:
3691 tree arg0 = TREE_OPERAND (arg, 0);
3692 tree arg1 = TREE_OPERAND (arg, 1);
3694 /* We need to check both for exact equality and tree equality. The
3695 former will be true if the operand has a side-effect. In that
3696 case, we know the operand occurred exactly once. */
3698 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3699 arg0 = new0;
3700 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3701 arg0 = new1;
3703 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3704 arg1 = new0;
3705 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3706 arg1 = new1;
3708 return fold_build2_loc (loc, code, type, arg0, arg1);
3711 default:
3712 return arg;
3716 /* Return a tree for the case when the result of an expression is RESULT
3717 converted to TYPE and OMITTED was previously an operand of the expression
3718 but is now not needed (e.g., we folded OMITTED * 0).
3720 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3721 the conversion of RESULT to TYPE. */
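/* So when folding f () * 0, omit_one_operand (type, integer_zero_node,
   call) produces the COMPOUND_EXPR (call, 0), preserving the call's side
   effects while discarding its value. */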
3723 tree
3724 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3726 tree t = fold_convert_loc (loc, type, result);
3728 /* If the resulting operand is an empty statement, just return the omitted
3729 statement cast to void. */
3730 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3732 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3733 goto omit_one_operand_exit;
3736 if (TREE_SIDE_EFFECTS (omitted))
3738 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3739 goto omit_one_operand_exit;
3742 return non_lvalue_loc (loc, t);
3744 omit_one_operand_exit:
3745 protected_set_expr_location (t, loc);
3746 return t;
3749 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3751 static tree
3752 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3753 tree omitted)
3755 tree t = fold_convert_loc (loc, type, result);
3757 /* If the resulting operand is an empty statement, just return the omitted
3758 statement cast to void. */
3759 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3761 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3762 goto pedantic_omit_one_operand_exit;
3765 if (TREE_SIDE_EFFECTS (omitted))
3767 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3768 goto pedantic_omit_one_operand_exit;
3771 return pedantic_non_lvalue_loc (loc, t);
3773 pedantic_omit_one_operand_exit:
3774 protected_set_expr_location (t, loc);
3775 return t;
3778 /* Return a tree for the case when the result of an expression is RESULT
3779 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3780 of the expression but are now not needed.
3782 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3783 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3784 evaluated before OMITTED2. Otherwise, if neither has side effects,
3785 just do the conversion of RESULT to TYPE. */
3787 tree
3788 omit_two_operands_loc (location_t loc, tree type, tree result,
3789 tree omitted1, tree omitted2)
3791 tree t = fold_convert_loc (loc, type, result);
3793 if (TREE_SIDE_EFFECTS (omitted2))
3795 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3796 SET_EXPR_LOCATION (t, loc);
3798 if (TREE_SIDE_EFFECTS (omitted1))
3800 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3801 SET_EXPR_LOCATION (t, loc);
3804 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3808 /* Return a simplified tree node for the truth-negation of ARG. This
3809 never alters ARG itself. We assume that ARG is an operation that
3810 returns a truth value (0 or 1).
3812 FIXME: one would think we would fold the result, but it causes
3813 problems with the dominator optimizer. */
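/* E.g. the negation of a && b is built as !a || !b (De Morgan), while a
   floating-point a < b under flag_trapping_math yields NULL_TREE,
   because the inverted UNGE_EXPR would not trap on the same operands. */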
3815 tree
3816 fold_truth_not_expr (location_t loc, tree arg)
3818 tree t, type = TREE_TYPE (arg);
3819 enum tree_code code = TREE_CODE (arg);
3820 location_t loc1, loc2;
3822 /* If this is a comparison, we can simply invert it, except for
3823 floating-point non-equality comparisons, in which case we just
3824 enclose a TRUTH_NOT_EXPR around what we have. */
3826 if (TREE_CODE_CLASS (code) == tcc_comparison)
3828 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3829 if (FLOAT_TYPE_P (op_type)
3830 && flag_trapping_math
3831 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3832 && code != NE_EXPR && code != EQ_EXPR)
3833 return NULL_TREE;
3835 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3836 if (code == ERROR_MARK)
3837 return NULL_TREE;
3839 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3840 SET_EXPR_LOCATION (t, loc);
3841 return t;
3844 switch (code)
3846 case INTEGER_CST:
3847 return constant_boolean_node (integer_zerop (arg), type);
3849 case TRUTH_AND_EXPR:
3850 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3851 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3852 if (loc1 == UNKNOWN_LOCATION)
3853 loc1 = loc;
3854 if (loc2 == UNKNOWN_LOCATION)
3855 loc2 = loc;
3856 t = build2 (TRUTH_OR_EXPR, type,
3857 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3858 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3859 break;
3861 case TRUTH_OR_EXPR:
3862 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3863 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3864 if (loc1 == UNKNOWN_LOCATION)
3865 loc1 = loc;
3866 if (loc2 == UNKNOWN_LOCATION)
3867 loc2 = loc;
3868 t = build2 (TRUTH_AND_EXPR, type,
3869 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3870 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3871 break;
3873 case TRUTH_XOR_EXPR:
3874 /* Here we can invert either operand. We invert the first operand
3875 unless the second operand is a TRUTH_NOT_EXPR in which case our
3876 result is the XOR of the first operand with the inside of the
3877 negation of the second operand. */
3879 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3880 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3881 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3882 else
3883 t = build2 (TRUTH_XOR_EXPR, type,
3884 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3885 TREE_OPERAND (arg, 1));
3886 break;
3888 case TRUTH_ANDIF_EXPR:
3889 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3890 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3891 if (loc1 == UNKNOWN_LOCATION)
3892 loc1 = loc;
3893 if (loc2 == UNKNOWN_LOCATION)
3894 loc2 = loc;
3895 t = build2 (TRUTH_ORIF_EXPR, type,
3896 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3897 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3898 break;
3900 case TRUTH_ORIF_EXPR:
3901 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3902 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3903 if (loc1 == UNKNOWN_LOCATION)
3904 loc1 = loc;
3905 if (loc2 == UNKNOWN_LOCATION)
3906 loc2 = loc;
3907 t = build2 (TRUTH_ANDIF_EXPR, type,
3908 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3909 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3910 break;
3912 case TRUTH_NOT_EXPR:
3913 return TREE_OPERAND (arg, 0);
3915 case COND_EXPR:
3917 tree arg1 = TREE_OPERAND (arg, 1);
3918 tree arg2 = TREE_OPERAND (arg, 2);
3920 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3921 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3922 if (loc1 == UNKNOWN_LOCATION)
3923 loc1 = loc;
3924 if (loc2 == UNKNOWN_LOCATION)
3925 loc2 = loc;
3927 /* A COND_EXPR may have a throw as one operand, which
3928 then has void type. Just leave void operands
3929 as they are. */
3930 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3931 VOID_TYPE_P (TREE_TYPE (arg1))
3932 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3933 VOID_TYPE_P (TREE_TYPE (arg2))
3934 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3935 break;
3938 case COMPOUND_EXPR:
3939 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3940 if (loc1 == UNKNOWN_LOCATION)
3941 loc1 = loc;
3942 t = build2 (COMPOUND_EXPR, type,
3943 TREE_OPERAND (arg, 0),
3944 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3945 break;
3947 case NON_LVALUE_EXPR:
3948 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3949 if (loc1 == UNKNOWN_LOCATION)
3950 loc1 = loc;
3951 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3953 CASE_CONVERT:
3954 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3956 t = build1 (TRUTH_NOT_EXPR, type, arg);
3957 break;
3960 /* ... fall through ... */
3962 case FLOAT_EXPR:
3963 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3964 if (loc1 == UNKNOWN_LOCATION)
3965 loc1 = loc;
3966 t = build1 (TREE_CODE (arg), type,
3967 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3968 break;
3970 case BIT_AND_EXPR:
3971 if (!integer_onep (TREE_OPERAND (arg, 1)))
3972 return NULL_TREE;
3973 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3974 break;
3976 case SAVE_EXPR:
3977 t = build1 (TRUTH_NOT_EXPR, type, arg);
3978 break;
3980 case CLEANUP_POINT_EXPR:
3981 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3982 if (loc1 == UNKNOWN_LOCATION)
3983 loc1 = loc;
3984 t = build1 (CLEANUP_POINT_EXPR, type,
3985 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3986 break;
3988 default:
3989 t = NULL_TREE;
3990 break;
3993 if (t)
3994 SET_EXPR_LOCATION (t, loc);
3996 return t;
3999 /* Return a simplified tree node for the truth-negation of ARG. This
4000 never alters ARG itself. We assume that ARG is an operation that
4001 returns a truth value (0 or 1).
4003 FIXME: one would think we would fold the result, but it causes
4004 problems with the dominator optimizer. */
4006 tree
4007 invert_truthvalue_loc (location_t loc, tree arg)
4009 tree tem;
4011 if (TREE_CODE (arg) == ERROR_MARK)
4012 return arg;
4014 tem = fold_truth_not_expr (loc, arg);
4015 if (!tem)
4017 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
4018 SET_EXPR_LOCATION (tem, loc);
4021 return tem;
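/* An illustrative sketch, not part of the original file, of the
   integer-operand inversions fold_truth_not_expr performs: a
   comparison is inverted directly, and De Morgan's laws push the
   negation into TRUTH_AND/TRUTH_OR operands.  */
static int
invert_truthvalue_example (int a, int b)
{
  int inv_cmp = !(a < b);            /* same as a >= b for integers */
  int inv_and = !(a < 0 && b < 0);   /* same as a >= 0 || b >= 0 */
  return inv_cmp & inv_and;
}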
4024 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
4025 operands are another bit-wise operation with a common input. If so,
4026 distribute the bit operations to save an operation and possibly two if
4027 constants are involved. For example, convert
4028 (A | B) & (A | C) into A | (B & C)
4029 Further simplification will occur if B and C are constants.
4031 If this optimization cannot be done, 0 will be returned. */
4033 static tree
4034 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
4035 tree arg0, tree arg1)
4037 tree common;
4038 tree left, right;
4040 if (TREE_CODE (arg0) != TREE_CODE (arg1)
4041 || TREE_CODE (arg0) == code
4042 || (TREE_CODE (arg0) != BIT_AND_EXPR
4043 && TREE_CODE (arg0) != BIT_IOR_EXPR))
4044 return 0;
4046 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
4048 common = TREE_OPERAND (arg0, 0);
4049 left = TREE_OPERAND (arg0, 1);
4050 right = TREE_OPERAND (arg1, 1);
4052 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
4054 common = TREE_OPERAND (arg0, 0);
4055 left = TREE_OPERAND (arg0, 1);
4056 right = TREE_OPERAND (arg1, 0);
4058 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
4060 common = TREE_OPERAND (arg0, 1);
4061 left = TREE_OPERAND (arg0, 0);
4062 right = TREE_OPERAND (arg1, 1);
4064 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
4066 common = TREE_OPERAND (arg0, 1);
4067 left = TREE_OPERAND (arg0, 0);
4068 right = TREE_OPERAND (arg1, 0);
4070 else
4071 return 0;
4073 common = fold_convert_loc (loc, type, common);
4074 left = fold_convert_loc (loc, type, left);
4075 right = fold_convert_loc (loc, type, right);
4076 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
4077 fold_build2_loc (loc, code, type, left, right));
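/* An illustrative sketch, not part of the original file, of the
   distribution above with concrete (hypothetical) masks: once the
   constants fold, the distributed form needs one operation fewer.  */
static int
distribute_bit_expr_example (unsigned int a)
{
  unsigned int before = (a | 0xF0u) & (a | 0x0Fu);
  unsigned int after = a | (0xF0u & 0x0Fu);   /* folds to a | 0, i.e. a */
  return before == after;   /* always 1 */
}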
4080 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
4081 with code CODE. This optimization is unsafe. */
4082 static tree
4083 distribute_real_division (location_t loc, enum tree_code code, tree type,
4084 tree arg0, tree arg1)
4086 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
4087 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
4089 /* (A / C) +- (B / C) -> (A +- B) / C. */
4090 if (mul0 == mul1
4091 && operand_equal_p (TREE_OPERAND (arg0, 1),
4092 TREE_OPERAND (arg1, 1), 0))
4093 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
4094 fold_build2_loc (loc, code, type,
4095 TREE_OPERAND (arg0, 0),
4096 TREE_OPERAND (arg1, 0)),
4097 TREE_OPERAND (arg0, 1));
4099 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
4100 if (operand_equal_p (TREE_OPERAND (arg0, 0),
4101 TREE_OPERAND (arg1, 0), 0)
4102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
4103 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
4105 REAL_VALUE_TYPE r0, r1;
4106 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
4107 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
4108 if (!mul0)
4109 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
4110 if (!mul1)
4111 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
4112 real_arithmetic (&r0, code, &r0, &r1);
4113 return fold_build2_loc (loc, MULT_EXPR, type,
4114 TREE_OPERAND (arg0, 0),
4115 build_real (type, r0));
4118 return NULL_TREE;
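/* An illustrative sketch, not part of the original file, of why this
   transformation is flagged unsafe: with rounding after every FP
   operation, the two forms below need not produce identical doubles,
   so the folder may only apply it under unsafe-math semantics.  */
static double
distribute_real_division_example (double a, double b)
{
  double separate = a / 3.0 + b / 3.0;   /* two divisions, two roundings */
  double combined = (a + b) / 3.0;       /* what the folder builds */
  return separate - combined;            /* may be nonzero */
}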
4121 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4122 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
4124 static tree
4125 make_bit_field_ref (location_t loc, tree inner, tree type,
4126 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
4128 tree result, bftype;
4130 if (bitpos == 0)
4132 tree size = TYPE_SIZE (TREE_TYPE (inner));
4133 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4134 || POINTER_TYPE_P (TREE_TYPE (inner)))
4135 && host_integerp (size, 0)
4136 && tree_low_cst (size, 0) == bitsize)
4137 return fold_convert_loc (loc, type, inner);
4140 bftype = type;
4141 if (TYPE_PRECISION (bftype) != bitsize
4142 || TYPE_UNSIGNED (bftype) == !unsignedp)
4143 bftype = build_nonstandard_integer_type (bitsize, 0);
4145 result = build3 (BIT_FIELD_REF, bftype, inner,
4146 size_int (bitsize), bitsize_int (bitpos));
4147 SET_EXPR_LOCATION (result, loc);
4149 if (bftype != type)
4150 result = fold_convert_loc (loc, type, result);
4152 return result;
4155 /* Optimize a bit-field compare.
4157 There are two cases: First is a compare against a constant and the
4158 second is a comparison of two items where the fields are at the same
4159 bit position relative to the start of a chunk (byte, halfword, word)
4160 large enough to contain it. In these cases we can avoid the shift
4161 implicit in bitfield extractions.
4163 For constants, we emit a compare of the shifted constant with the
4164 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4165 compared. For two fields at the same position, we do the ANDs with the
4166 similar mask and compare the result of the ANDs.
4168 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4169 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4170 are the left and right operands of the comparison, respectively.
4172 If the optimization described above can be done, we return the resulting
4173 tree. Otherwise we return zero. */
4175 static tree
4176 optimize_bit_field_compare (location_t loc, enum tree_code code,
4177 tree compare_type, tree lhs, tree rhs)
4179 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4180 tree type = TREE_TYPE (lhs);
4181 tree signed_type, unsigned_type;
4182 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4183 enum machine_mode lmode, rmode, nmode;
4184 int lunsignedp, runsignedp;
4185 int lvolatilep = 0, rvolatilep = 0;
4186 tree linner, rinner = NULL_TREE;
4187 tree mask;
4188 tree offset;
4190 /* Get all the information about the extractions being done. If the bit size
4191 is the same as the size of the underlying object, we aren't doing an
4192 extraction at all and so can do nothing. We also don't want to
4193 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4194 then will no longer be able to replace it. */
4195 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4196 &lunsignedp, &lvolatilep, false);
4197 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4198 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
4199 return 0;
4201 if (!const_p)
4203 /* If this is not a constant, we can only do something if bit positions,
4204 sizes, and signedness are the same. */
4205 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4206 &runsignedp, &rvolatilep, false);
4208 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4209 || lunsignedp != runsignedp || offset != 0
4210 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4211 return 0;
4214 /* See if we can find a mode to refer to this field. We should be able to,
4215 but fail if we can't. */
4216 nmode = get_best_mode (lbitsize, lbitpos,
4217 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4218 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4219 TYPE_ALIGN (TREE_TYPE (rinner))),
4220 word_mode, lvolatilep || rvolatilep);
4221 if (nmode == VOIDmode)
4222 return 0;
4224 /* Set signed and unsigned types of the precision of this mode for the
4225 shifts below. */
4226 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4227 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4229 /* Compute the bit position and size for the new reference and our offset
4230 within it. If the new reference is the same size as the original, we
4231 won't optimize anything, so return zero. */
4232 nbitsize = GET_MODE_BITSIZE (nmode);
4233 nbitpos = lbitpos & ~ (nbitsize - 1);
4234 lbitpos -= nbitpos;
4235 if (nbitsize == lbitsize)
4236 return 0;
4238 if (BYTES_BIG_ENDIAN)
4239 lbitpos = nbitsize - lbitsize - lbitpos;
4241 /* Make the mask to be used against the extracted field. */
4242 mask = build_int_cst_type (unsigned_type, -1);
4243 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4244 mask = const_binop (RSHIFT_EXPR, mask,
4245 size_int (nbitsize - lbitsize - lbitpos), 0);
4247 if (! const_p)
4248 /* If not comparing with constant, just rework the comparison
4249 and return. */
4250 return fold_build2_loc (loc, code, compare_type,
4251 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4252 make_bit_field_ref (loc, linner,
4253 unsigned_type,
4254 nbitsize, nbitpos,
4255 1),
4256 mask),
4257 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4258 make_bit_field_ref (loc, rinner,
4259 unsigned_type,
4260 nbitsize, nbitpos,
4261 1),
4262 mask));
4264 /* Otherwise, we are handling the constant case. See if the constant is too
4265 big for the field. Warn and return a tree for 0 (false) if so. We do
4266 this not only for its own sake, but to avoid having to test for this
4267 error case below. If we didn't, we might generate wrong code.
4269 For unsigned fields, the constant shifted right by the field length should
4270 be all zero. For signed fields, the high-order bits should agree with
4271 the sign bit. */
4273 if (lunsignedp)
4275 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4276 fold_convert_loc (loc,
4277 unsigned_type, rhs),
4278 size_int (lbitsize), 0)))
4280 warning (0, "comparison is always %d due to width of bit-field",
4281 code == NE_EXPR);
4282 return constant_boolean_node (code == NE_EXPR, compare_type);
4285 else
4287 tree tem = const_binop (RSHIFT_EXPR,
4288 fold_convert_loc (loc, signed_type, rhs),
4289 size_int (lbitsize - 1), 0);
4290 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4292 warning (0, "comparison is always %d due to width of bit-field",
4293 code == NE_EXPR);
4294 return constant_boolean_node (code == NE_EXPR, compare_type);
4298 /* Single-bit compares should always be against zero. */
4299 if (lbitsize == 1 && ! integer_zerop (rhs))
4301 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4302 rhs = build_int_cst (type, 0);
4305 /* Make a new bitfield reference, shift the constant over the
4306 appropriate number of bits, and mask it with the computed mask
4307 (in case this was a signed field); the result becomes the new RHS. */
4308 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
4309 if (lvolatilep)
4311 TREE_SIDE_EFFECTS (lhs) = 1;
4312 TREE_THIS_VOLATILE (lhs) = 1;
4315 rhs = const_binop (BIT_AND_EXPR,
4316 const_binop (LSHIFT_EXPR,
4317 fold_convert_loc (loc, unsigned_type, rhs),
4318 size_int (lbitpos), 0),
4319 mask, 0);
4321 lhs = build2 (code, compare_type,
4322 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4323 rhs);
4324 SET_EXPR_LOCATION (lhs, loc);
4325 return lhs;
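/* An illustrative sketch, not part of the original file, of the
   shift-free comparison this function builds, using a hypothetical
   3-bit field at bit position 4 compared against the constant 5.  */
static int
bit_field_compare_example (unsigned int word)
{
  /* The naive extraction shifts the field down before comparing...  */
  int shifted = ((word >> 4) & 7u) == 5u;
  /* ...whereas shifting the constant up lets the word be masked and
     compared in place.  */
  int in_place = (word & (7u << 4)) == (5u << 4);
  return shifted == in_place;   /* always 1 */
}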
4328 /* Subroutine for fold_truthop: decode a field reference.
4330 If EXP is a comparison reference, we return the innermost reference.
4332 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4333 set to the starting bit number.
4335 If the innermost field can be completely contained in a mode-sized
4336 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4338 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4339 otherwise it is not changed.
4341 *PUNSIGNEDP is set to the signedness of the field.
4343 *PMASK is set to the mask used. This is either contained in a
4344 BIT_AND_EXPR or derived from the width of the field.
4346 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4348 Return 0 if this is not a component reference or is one that we can't
4349 do anything with. */
4351 static tree
4352 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4353 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4354 int *punsignedp, int *pvolatilep,
4355 tree *pmask, tree *pand_mask)
4357 tree outer_type = 0;
4358 tree and_mask = 0;
4359 tree mask, inner, offset;
4360 tree unsigned_type;
4361 unsigned int precision;
4363 /* All the optimizations using this function assume integer fields.
4364 There are problems with FP fields since the type_for_size call
4365 below can fail for, e.g., XFmode. */
4366 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4367 return 0;
4369 /* We are interested in the bare arrangement of bits, so strip everything
4370 that doesn't affect the machine mode. However, record the type of the
4371 outermost expression if it may matter below. */
4372 if (CONVERT_EXPR_P (exp)
4373 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4374 outer_type = TREE_TYPE (exp);
4375 STRIP_NOPS (exp);
4377 if (TREE_CODE (exp) == BIT_AND_EXPR)
4379 and_mask = TREE_OPERAND (exp, 1);
4380 exp = TREE_OPERAND (exp, 0);
4381 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4382 if (TREE_CODE (and_mask) != INTEGER_CST)
4383 return 0;
4386 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4387 punsignedp, pvolatilep, false);
4388 if ((inner == exp && and_mask == 0)
4389 || *pbitsize < 0 || offset != 0
4390 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4391 return 0;
4393 /* If the number of bits in the reference is the same as the bitsize of
4394 the outer type, then the outer type gives the signedness. Otherwise
4395 (in case of a small bitfield) the signedness is unchanged. */
4396 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4397 *punsignedp = TYPE_UNSIGNED (outer_type);
4399 /* Compute the mask to access the bitfield. */
4400 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4401 precision = TYPE_PRECISION (unsigned_type);
4403 mask = build_int_cst_type (unsigned_type, -1);
4405 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4406 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4408 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4409 if (and_mask != 0)
4410 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4411 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4413 *pmask = mask;
4414 *pand_mask = and_mask;
4415 return inner;
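/* An illustrative sketch, not part of the original file, of the mask
   computation above with hypothetical numbers (32-bit precision,
   5-bit field): shifting all-ones left and then right again leaves
   exactly *PBITSIZE ones in the low-order bits.  */
static unsigned int
field_mask_example (void)
{
  unsigned int precision = 32, bitsize = 5;
  unsigned int mask = ~0u;
  mask <<= precision - bitsize;   /* 0xF8000000 */
  mask >>= precision - bitsize;   /* 0x0000001F */
  return mask;
}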
4418 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4419 bit positions. */
4421 static int
4422 all_ones_mask_p (const_tree mask, int size)
4424 tree type = TREE_TYPE (mask);
4425 unsigned int precision = TYPE_PRECISION (type);
4426 tree tmask;
4428 tmask = build_int_cst_type (signed_type_for (type), -1);
4430 return
4431 tree_int_cst_equal (mask,
4432 const_binop (RSHIFT_EXPR,
4433 const_binop (LSHIFT_EXPR, tmask,
4434 size_int (precision - size),
4435 0),
4436 size_int (precision - size), 0));
4439 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4440 represents the sign bit of EXP's type. If EXP represents a sign
4441 or zero extension, also test VAL against the unextended type.
4442 The return value is the (sub)expression whose sign bit is VAL,
4443 or NULL_TREE otherwise. */
4445 static tree
4446 sign_bit_p (tree exp, const_tree val)
4448 unsigned HOST_WIDE_INT mask_lo, lo;
4449 HOST_WIDE_INT mask_hi, hi;
4450 int width;
4451 tree t;
4453 /* Tree EXP must have an integral type. */
4454 t = TREE_TYPE (exp);
4455 if (! INTEGRAL_TYPE_P (t))
4456 return NULL_TREE;
4458 /* Tree VAL must be an integer constant. */
4459 if (TREE_CODE (val) != INTEGER_CST
4460 || TREE_OVERFLOW (val))
4461 return NULL_TREE;
4463 width = TYPE_PRECISION (t);
4464 if (width > HOST_BITS_PER_WIDE_INT)
4466 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4467 lo = 0;
4469 mask_hi = ((unsigned HOST_WIDE_INT) -1
4470 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4471 mask_lo = -1;
4473 else
4475 hi = 0;
4476 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4478 mask_hi = 0;
4479 mask_lo = ((unsigned HOST_WIDE_INT) -1
4480 >> (HOST_BITS_PER_WIDE_INT - width));
4483 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4484 treat VAL as if it were unsigned. */
4485 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4486 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4487 return exp;
4489 /* Handle extension from a narrower type. */
4490 if (TREE_CODE (exp) == NOP_EXPR
4491 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4492 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4494 return NULL_TREE;
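/* An illustrative sketch, not part of the original file, of the kind
   of simplification sign_bit_p enables elsewhere in the folder: on a
   two's-complement 32-bit int, testing the sign bit with a mask is
   the same as a signed comparison against zero.  */
static int
sign_bit_example (int x)
{
  int via_mask = ((unsigned int) x & 0x80000000u) != 0;
  int via_compare = x < 0;
  return via_mask == via_compare;   /* always 1 */
}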
4497 /* Subroutine for fold_truthop: determine if an operand is simple enough
4498 to be evaluated unconditionally. */
4500 static int
4501 simple_operand_p (const_tree exp)
4503 /* Strip any conversions that don't change the machine mode. */
4504 STRIP_NOPS (exp);
4506 return (CONSTANT_CLASS_P (exp)
4507 || TREE_CODE (exp) == SSA_NAME
4508 || (DECL_P (exp)
4509 && ! TREE_ADDRESSABLE (exp)
4510 && ! TREE_THIS_VOLATILE (exp)
4511 && ! DECL_NONLOCAL (exp)
4512 /* Don't regard global variables as simple. They may be
4513 allocated in ways unknown to the compiler (shared memory,
4514 #pragma weak, etc). */
4515 && ! TREE_PUBLIC (exp)
4516 && ! DECL_EXTERNAL (exp)
4517 /* Loading a static variable is unduly expensive, but global
4518 registers aren't expensive. */
4519 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4522 /* The following functions are subroutines to fold_range_test and allow it to
4523 try to change a logical combination of comparisons into a range test.
4525 For example, both
4526 X == 2 || X == 3 || X == 4 || X == 5
4527 and
4528 X >= 2 && X <= 5
4529 are converted to
4530 (unsigned) (X - 2) <= 3
4532 We describe each set of comparisons as being either inside or outside
4533 a range, using a variable named like IN_P, and then describe the
4534 range with a lower and upper bound. If one of the bounds is omitted,
4535 it represents either the highest or lowest value of the type.
4537 In the comments below, we represent a range by two numbers in brackets
4538 preceded by a "+" to designate being inside that range, or a "-" to
4539 designate being outside that range, so the condition can be inverted by
4540 flipping the prefix. An omitted bound is represented by a "-". For
4541 example, "- [-, 10]" means being outside the range starting at the lowest
4542 possible value and ending at 10, in other words, being greater than 10.
4543 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4544 always false.
4546 We set up things so that the missing bounds are handled in a consistent
4547 manner so neither a missing bound nor "true" and "false" need to be
4548 handled using a special case. */
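/* An illustrative sketch, not part of the original file, of the
   running example above: subtracting the low bound maps the range
   + [2, 5] onto [0, 3], and values below 2 wrap around to huge
   unsigned numbers, so one unsigned compare suffices.  */
static int
range_test_example (int x)
{
  int longhand = x == 2 || x == 3 || x == 4 || x == 5;
  int as_range = ((unsigned int) x - 2u) <= 3u;
  return longhand == as_range;   /* always 1 */
}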
4550 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4551 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4552 and UPPER1_P are nonzero if the respective argument is an upper bound
4553 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4554 must be specified for a comparison. ARG1 will be converted to ARG0's
4555 type if both are specified. */
4557 static tree
4558 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4559 tree arg1, int upper1_p)
4561 tree tem;
4562 int result;
4563 int sgn0, sgn1;
4565 /* If neither arg represents infinity, do the normal operation.
4566 Else, if not a comparison, return infinity. Else handle the special
4567 comparison rules. Note that most of the cases below won't occur, but
4568 are handled for consistency. */
4570 if (arg0 != 0 && arg1 != 0)
4572 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4573 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4574 STRIP_NOPS (tem);
4575 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4578 if (TREE_CODE_CLASS (code) != tcc_comparison)
4579 return 0;
4581 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4582 for neither. In real mathematics, we cannot assume open-ended ranges are
4583 the same. But, this is computer arithmetic, where numbers are finite.
4584 We can therefore make the transformation of any unbounded range with
4585 the value Z, Z being greater than any representable number. This permits
4586 us to treat unbounded ranges as equal. */
4587 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4588 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4589 switch (code)
4591 case EQ_EXPR:
4592 result = sgn0 == sgn1;
4593 break;
4594 case NE_EXPR:
4595 result = sgn0 != sgn1;
4596 break;
4597 case LT_EXPR:
4598 result = sgn0 < sgn1;
4599 break;
4600 case LE_EXPR:
4601 result = sgn0 <= sgn1;
4602 break;
4603 case GT_EXPR:
4604 result = sgn0 > sgn1;
4605 break;
4606 case GE_EXPR:
4607 result = sgn0 >= sgn1;
4608 break;
4609 default:
4610 gcc_unreachable ();
4613 return constant_boolean_node (result, type);
4616 /* Given EXP, a logical expression, set the range it is testing into
4617 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4618 actually being tested. *PLOW and *PHIGH will be made of the same
4619 type as the returned expression. If EXP is not a comparison, we
4620 will most likely not be returning a useful value and range. Set
4621 *STRICT_OVERFLOW_P to true if the return value is only valid
4622 because signed overflow is undefined; otherwise, do not change
4623 *STRICT_OVERFLOW_P. */
4625 tree
4626 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4627 bool *strict_overflow_p)
4629 enum tree_code code;
4630 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4631 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4632 int in_p, n_in_p;
4633 tree low, high, n_low, n_high;
4634 location_t loc = EXPR_LOCATION (exp);
4636 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4637 and see if we can refine the range. Some of the cases below may not
4638 happen, but it doesn't seem worth worrying about this. We "continue"
4639 the outer loop when we've changed something; otherwise we "break"
4640 the switch, which will "break" the while. */
4642 in_p = 0;
4643 low = high = build_int_cst (TREE_TYPE (exp), 0);
4645 while (1)
4647 code = TREE_CODE (exp);
4648 exp_type = TREE_TYPE (exp);
4650 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4652 if (TREE_OPERAND_LENGTH (exp) > 0)
4653 arg0 = TREE_OPERAND (exp, 0);
4654 if (TREE_CODE_CLASS (code) == tcc_comparison
4655 || TREE_CODE_CLASS (code) == tcc_unary
4656 || TREE_CODE_CLASS (code) == tcc_binary)
4657 arg0_type = TREE_TYPE (arg0);
4658 if (TREE_CODE_CLASS (code) == tcc_binary
4659 || TREE_CODE_CLASS (code) == tcc_comparison
4660 || (TREE_CODE_CLASS (code) == tcc_expression
4661 && TREE_OPERAND_LENGTH (exp) > 1))
4662 arg1 = TREE_OPERAND (exp, 1);
4665 switch (code)
4667 case TRUTH_NOT_EXPR:
4668 in_p = ! in_p, exp = arg0;
4669 continue;
4671 case EQ_EXPR: case NE_EXPR:
4672 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4673 /* We can only do something if the range is testing for zero
4674 and if the second operand is an integer constant. Note that
4675 saying something is "in" the range we make is done by
4676 complementing IN_P since it will set in the initial case of
4677 being not equal to zero; "out" is leaving it alone. */
4678 if (low == 0 || high == 0
4679 || ! integer_zerop (low) || ! integer_zerop (high)
4680 || TREE_CODE (arg1) != INTEGER_CST)
4681 break;
4683 switch (code)
4685 case NE_EXPR: /* - [c, c] */
4686 low = high = arg1;
4687 break;
4688 case EQ_EXPR: /* + [c, c] */
4689 in_p = ! in_p, low = high = arg1;
4690 break;
4691 case GT_EXPR: /* - [-, c] */
4692 low = 0, high = arg1;
4693 break;
4694 case GE_EXPR: /* + [c, -] */
4695 in_p = ! in_p, low = arg1, high = 0;
4696 break;
4697 case LT_EXPR: /* - [c, -] */
4698 low = arg1, high = 0;
4699 break;
4700 case LE_EXPR: /* + [-, c] */
4701 in_p = ! in_p, low = 0, high = arg1;
4702 break;
4703 default:
4704 gcc_unreachable ();
4707 /* If this is an unsigned comparison, we also know that EXP is
4708 greater than or equal to zero. We base the range tests we make
4709 on that fact, so we record it here so we can parse existing
4710 range tests. We test arg0_type since often the return type
4711 of, e.g. EQ_EXPR, is boolean. */
4712 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4714 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4715 in_p, low, high, 1,
4716 build_int_cst (arg0_type, 0),
4717 NULL_TREE))
4718 break;
4720 in_p = n_in_p, low = n_low, high = n_high;
4722 /* If the high bound is missing, but we have a nonzero low
4723 bound, reverse the range so it goes from zero to the low bound
4724 minus 1. */
4725 if (high == 0 && low && ! integer_zerop (low))
4727 in_p = ! in_p;
4728 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4729 integer_one_node, 0);
4730 low = build_int_cst (arg0_type, 0);
4734 exp = arg0;
4735 continue;
4737 case NEGATE_EXPR:
4738 /* (-x) IN [a,b] -> x in [-b, -a] */
4739 n_low = range_binop (MINUS_EXPR, exp_type,
4740 build_int_cst (exp_type, 0),
4741 0, high, 1);
4742 n_high = range_binop (MINUS_EXPR, exp_type,
4743 build_int_cst (exp_type, 0),
4744 0, low, 0);
4745 low = n_low, high = n_high;
4746 exp = arg0;
4747 continue;
4749 case BIT_NOT_EXPR:
4750 /* ~ X -> -X - 1 */
4751 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4752 build_int_cst (exp_type, 1));
4753 SET_EXPR_LOCATION (exp, loc);
4754 continue;
4756 case PLUS_EXPR: case MINUS_EXPR:
4757 if (TREE_CODE (arg1) != INTEGER_CST)
4758 break;
4760 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4761 move a constant to the other side. */
4762 if (!TYPE_UNSIGNED (arg0_type)
4763 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4764 break;
4766 /* If EXP is signed, any overflow in the computation is undefined,
4767 so we don't worry about it so long as our computations on
4768 the bounds don't overflow. For unsigned, overflow is defined
4769 and this is exactly the right thing. */
4770 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4771 arg0_type, low, 0, arg1, 0);
4772 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4773 arg0_type, high, 1, arg1, 0);
4774 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4775 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4776 break;
4778 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4779 *strict_overflow_p = true;
4781 /* Check for an unsigned range which has wrapped around the maximum
4782 value thus making n_high < n_low, and normalize it. */
4783 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4785 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4786 integer_one_node, 0);
4787 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4788 integer_one_node, 0);
4790 /* If the range is of the form +/- [ x+1, x ], we won't
4791 be able to normalize it. But then, it represents the
4792 whole range or the empty set, so make it
4793 +/- [ -, - ]. */
4794 if (tree_int_cst_equal (n_low, low)
4795 && tree_int_cst_equal (n_high, high))
4796 low = high = 0;
4797 else
4798 in_p = ! in_p;
4800 else
4801 low = n_low, high = n_high;
4803 exp = arg0;
4804 continue;
4806 CASE_CONVERT: case NON_LVALUE_EXPR:
4807 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4808 break;
4810 if (! INTEGRAL_TYPE_P (arg0_type)
4811 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4812 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4813 break;
4815 n_low = low, n_high = high;
4817 if (n_low != 0)
4818 n_low = fold_convert_loc (loc, arg0_type, n_low);
4820 if (n_high != 0)
4821 n_high = fold_convert_loc (loc, arg0_type, n_high);
4824 /* If we're converting arg0 from an unsigned type to exp's
4825 signed type, we will be doing the comparison as unsigned.
4826 The tests above have already verified that LOW and HIGH
4827 are both positive.
4829 So we have to ensure that we will handle large unsigned
4830 values the same way that the current signed bounds treat
4831 negative values. */
4833 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4835 tree high_positive;
4836 tree equiv_type;
4837 /* For fixed-point modes, we need to pass the saturating flag
4838 as the 2nd parameter. */
4839 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4840 equiv_type = lang_hooks.types.type_for_mode
4841 (TYPE_MODE (arg0_type),
4842 TYPE_SATURATING (arg0_type));
4843 else
4844 equiv_type = lang_hooks.types.type_for_mode
4845 (TYPE_MODE (arg0_type), 1);
4847 /* A range without an upper bound is, naturally, unbounded.
4848 Since convert would have cropped a very large value, use
4849 the max value for the destination type. */
4850 high_positive
4851 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4852 : TYPE_MAX_VALUE (arg0_type);
4854 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4855 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4856 fold_convert_loc (loc, arg0_type,
4857 high_positive),
4858 build_int_cst (arg0_type, 1));
4860 /* If the low bound is specified, "and" the range with the
4861 range for which the original unsigned value will be
4862 positive. */
4863 if (low != 0)
4865 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4866 1, n_low, n_high, 1,
4867 fold_convert_loc (loc, arg0_type,
4868 integer_zero_node),
4869 high_positive))
4870 break;
4872 in_p = (n_in_p == in_p);
4874 else
4876 /* Otherwise, "or" the range with the range of the input
4877 that will be interpreted as negative. */
4878 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4879 0, n_low, n_high, 1,
4880 fold_convert_loc (loc, arg0_type,
4881 integer_zero_node),
4882 high_positive))
4883 break;
4885 in_p = (in_p != n_in_p);
4889 exp = arg0;
4890 low = n_low, high = n_high;
4891 continue;
4893 default:
4894 break;
4897 break;
4900 /* If EXP is a constant, we can evaluate whether this is true or false. */
4901 if (TREE_CODE (exp) == INTEGER_CST)
4903 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4904 exp, 0, low, 0))
4905 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4906 exp, 1, high, 1)));
4907 low = high = 0;
4908 exp = 0;
4911 *pin_p = in_p, *plow = low, *phigh = high;
4912 return exp;
4915 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4916 type, TYPE, return an expression to test if EXP is in (or out of, depending
4917 on IN_P) the range. Return 0 if the test couldn't be created. */
4919 tree
4920 build_range_check (location_t loc, tree type, tree exp, int in_p,
4921 tree low, tree high)
4923 tree etype = TREE_TYPE (exp), value;
4925 #ifdef HAVE_canonicalize_funcptr_for_compare
4926 /* Disable this optimization for function pointer expressions
4927 on targets that require function pointer canonicalization. */
4928 if (HAVE_canonicalize_funcptr_for_compare
4929 && TREE_CODE (etype) == POINTER_TYPE
4930 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4931 return NULL_TREE;
4932 #endif
4934 if (! in_p)
4936 value = build_range_check (loc, type, exp, 1, low, high);
4937 if (value != 0)
4938 return invert_truthvalue_loc (loc, value);
4940 return 0;
4943 if (low == 0 && high == 0)
4944 return build_int_cst (type, 1);
4946 if (low == 0)
4947 return fold_build2_loc (loc, LE_EXPR, type, exp,
4948 fold_convert_loc (loc, etype, high));
4950 if (high == 0)
4951 return fold_build2_loc (loc, GE_EXPR, type, exp,
4952 fold_convert_loc (loc, etype, low));
4954 if (operand_equal_p (low, high, 0))
4955 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4956 fold_convert_loc (loc, etype, low));
4958 if (integer_zerop (low))
4960 if (! TYPE_UNSIGNED (etype))
4962 etype = unsigned_type_for (etype);
4963 high = fold_convert_loc (loc, etype, high);
4964 exp = fold_convert_loc (loc, etype, exp);
4966 return build_range_check (loc, type, exp, 1, 0, high);
4969 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4970 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4972 unsigned HOST_WIDE_INT lo;
4973 HOST_WIDE_INT hi;
4974 int prec;
4976 prec = TYPE_PRECISION (etype);
4977 if (prec <= HOST_BITS_PER_WIDE_INT)
4979 hi = 0;
4980 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4982 else
4984 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4985 lo = (unsigned HOST_WIDE_INT) -1;
4988 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4990 if (TYPE_UNSIGNED (etype))
4992 tree signed_etype = signed_type_for (etype);
4993 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4994 etype
4995 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4996 else
4997 etype = signed_etype;
4998 exp = fold_convert_loc (loc, etype, exp);
5000 return fold_build2_loc (loc, GT_EXPR, type, exp,
5001 build_int_cst (etype, 0));
5005 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5006 This requires wrap-around arithmetic for the type of the expression.
5007 First make sure that arithmetic in this type is valid, then make sure
5008 that it wraps around. */
5009 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5010 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
5011 TYPE_UNSIGNED (etype));
5013 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
5015 tree utype, minv, maxv;
5017 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5018 for the type in question, as we rely on this here. */
5019 utype = unsigned_type_for (etype);
5020 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
5021 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5022 integer_one_node, 1);
5023 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
5025 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5026 minv, 1, maxv, 1)))
5027 etype = utype;
5028 else
5029 return 0;
5032 high = fold_convert_loc (loc, etype, high);
5033 low = fold_convert_loc (loc, etype, low);
5034 exp = fold_convert_loc (loc, etype, exp);
5036 value = const_binop (MINUS_EXPR, high, low, 0);
5039 if (POINTER_TYPE_P (etype))
5041 if (value != 0 && !TREE_OVERFLOW (value))
5043 low = fold_convert_loc (loc, sizetype, low);
5044 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
5045 return build_range_check (loc, type,
5046 fold_build2_loc (loc, POINTER_PLUS_EXPR,
5047 etype, exp, low),
5048 1, build_int_cst (etype, 0), value);
5050 return 0;
5053 if (value != 0 && !TREE_OVERFLOW (value))
5054 return build_range_check (loc, type,
5055 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5056 1, build_int_cst (etype, 0), value);
5058 return 0;
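/* An illustrative sketch, not part of the original file, of the
   (c >= 1) && (c <= 127) into (signed char) c > 0 case above,
   assuming 8-bit chars and two's-complement conversion behavior.  */
static int
range_check_example (unsigned char c)
{
  int two_compares = c >= 1 && c <= 127;
  int one_compare = (signed char) c > 0;
  return two_compares == one_compare;   /* always 1 on such targets */
}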
5061 /* Return the predecessor of VAL in its type, handling the infinite case. */
5063 static tree
5064 range_predecessor (tree val)
5066 tree type = TREE_TYPE (val);
5068 if (INTEGRAL_TYPE_P (type)
5069 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5070 return 0;
5071 else
5072 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5075 /* Return the successor of VAL in its type, handling the infinite case. */
5077 static tree
5078 range_successor (tree val)
5080 tree type = TREE_TYPE (val);
5082 if (INTEGRAL_TYPE_P (type)
5083 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5084 return 0;
5085 else
5086 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5089 /* Given two ranges, see if we can merge them into one. Return 1 if we
5090 can, 0 if we can't. Set the output range into the specified parameters. */
5092 bool
5093 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5094 tree high0, int in1_p, tree low1, tree high1)
5096 int no_overlap;
5097 int subset;
5098 int temp;
5099 tree tem;
5100 int in_p;
5101 tree low, high;
5102 int lowequal = ((low0 == 0 && low1 == 0)
5103 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5104 low0, 0, low1, 0)));
5105 int highequal = ((high0 == 0 && high1 == 0)
5106 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5107 high0, 1, high1, 1)));
5109 /* Make range 0 be the range that starts first, or ends last if they
5110 start at the same value. Swap them if it isn't. */
5111 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5112 low0, 0, low1, 0))
5113 || (lowequal
5114 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5115 high1, 1, high0, 1))))
5117 temp = in0_p, in0_p = in1_p, in1_p = temp;
5118 tem = low0, low0 = low1, low1 = tem;
5119 tem = high0, high0 = high1, high1 = tem;
5122 /* Now flag two cases, whether the ranges are disjoint or whether the
5123 second range is totally subsumed in the first. Note that the tests
5124 below are simplified by the ones above. */
5125 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5126 high0, 1, low1, 0));
5127 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5128 high1, 1, high0, 1));
5130 /* We now have four cases, depending on whether we are including or
5131 excluding the two ranges. */
5132 if (in0_p && in1_p)
5134 /* If they don't overlap, the result is false. If the second range
5135 is a subset it is the result. Otherwise, the range is from the start
5136 of the second to the end of the first. */
5137 if (no_overlap)
5138 in_p = 0, low = high = 0;
5139 else if (subset)
5140 in_p = 1, low = low1, high = high1;
5141 else
5142 in_p = 1, low = low1, high = high0;
5145 else if (in0_p && ! in1_p)
5147 /* If they don't overlap, the result is the first range. If they are
5148 equal, the result is false. If the second range is a subset of the
5149 first, and the ranges begin at the same place, we go from just after
5150 the end of the second range to the end of the first. If the second
5151 range is not a subset of the first, or if it is a subset and both
5152 ranges end at the same place, the range starts at the start of the
5153 first range and ends just before the second range.
5154 Otherwise, we can't describe this as a single range. */
5155 if (no_overlap)
5156 in_p = 1, low = low0, high = high0;
5157 else if (lowequal && highequal)
5158 in_p = 0, low = high = 0;
5159 else if (subset && lowequal)
5161 low = range_successor (high1);
5162 high = high0;
5163 in_p = 1;
5164 if (low == 0)
5166 /* We are in the weird situation where high0 > high1 but
5167 high1 has no successor. Punt. */
5168 return 0;
5171 else if (! subset || highequal)
5173 low = low0;
5174 high = range_predecessor (low1);
5175 in_p = 1;
5176 if (high == 0)
5178 /* low0 < low1 but low1 has no predecessor. Punt. */
5179 return 0;
5182 else
5183 return 0;
5186 else if (! in0_p && in1_p)
5188 /* If they don't overlap, the result is the second range. If the second
5189 is a subset of the first, the result is false. Otherwise,
5190 the range starts just after the first range and ends at the
5191 end of the second. */
5192 if (no_overlap)
5193 in_p = 1, low = low1, high = high1;
5194 else if (subset || highequal)
5195 in_p = 0, low = high = 0;
5196 else
5198 low = range_successor (high0);
5199 high = high1;
5200 in_p = 1;
5201 if (low == 0)
5203 /* high1 > high0 but high0 has no successor. Punt. */
5204 return 0;
5209 else
5211 /* The case where we are excluding both ranges. Here the complex case
5212 is if they don't overlap. In that case, the only time we have a
5213 range is if they are adjacent. If the second is a subset of the
5214 first, the result is the first. Otherwise, the range to exclude
5215 starts at the beginning of the first range and ends at the end of the
5216 second. */
5217 if (no_overlap)
5219 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5220 range_successor (high0),
5221 1, low1, 0)))
5222 in_p = 0, low = low0, high = high1;
5223 else
5225 /* Canonicalize - [min, x] into - [-, x]. */
5226 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5227 switch (TREE_CODE (TREE_TYPE (low0)))
5229 case ENUMERAL_TYPE:
5230 if (TYPE_PRECISION (TREE_TYPE (low0))
5231 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5232 break;
5233 /* FALLTHROUGH */
5234 case INTEGER_TYPE:
5235 if (tree_int_cst_equal (low0,
5236 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5237 low0 = 0;
5238 break;
5239 case POINTER_TYPE:
5240 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5241 && integer_zerop (low0))
5242 low0 = 0;
5243 break;
5244 default:
5245 break;
5248 /* Canonicalize - [x, max] into - [x, -]. */
5249 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5250 switch (TREE_CODE (TREE_TYPE (high1)))
5252 case ENUMERAL_TYPE:
5253 if (TYPE_PRECISION (TREE_TYPE (high1))
5254 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5255 break;
5256 /* FALLTHROUGH */
5257 case INTEGER_TYPE:
5258 if (tree_int_cst_equal (high1,
5259 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5260 high1 = 0;
5261 break;
5262 case POINTER_TYPE:
5263 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5264 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5265 high1, 1,
5266 integer_one_node, 1)))
5267 high1 = 0;
5268 break;
5269 default:
5270 break;
5273 /* The ranges might be also adjacent between the maximum and
5274 minimum values of the given type. For
5275 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5276 return + [x + 1, y - 1]. */
5277 if (low0 == 0 && high1 == 0)
5279 low = range_successor (high0);
5280 high = range_predecessor (low1);
5281 if (low == 0 || high == 0)
5282 return 0;
5284 in_p = 1;
5286 else
5287 return 0;
5290 else if (subset)
5291 in_p = 0, low = low0, high = high0;
5292 else
5293 in_p = 0, low = low0, high = high1;
5296 *pin_p = in_p, *plow = low, *phigh = high;
5297 return 1;
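/* An illustrative sketch, not part of the original file, of the
   in0_p && in1_p case above with hypothetical bounds: intersecting
   + [2, 9] with + [4, 12] yields + [4, 9], the start of the second
   range to the end of the first.  */
static int
merge_ranges_example (int x)
{
  int both = (x >= 2 && x <= 9) && (x >= 4 && x <= 12);
  int merged = x >= 4 && x <= 9;
  return both == merged;   /* always 1 */
}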
5301 /* Subroutine of fold, looking inside expressions of the form
5302 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5303 of the COND_EXPR. This function is also used to optimize
5304 A op B ? C : A, by reversing the comparison first.
5306 Return a folded expression whose code is not a COND_EXPR
5307 anymore, or NULL_TREE if no folding opportunity is found. */
5309 static tree
5310 fold_cond_expr_with_comparison (location_t loc, tree type,
5311 tree arg0, tree arg1, tree arg2)
5313 enum tree_code comp_code = TREE_CODE (arg0);
5314 tree arg00 = TREE_OPERAND (arg0, 0);
5315 tree arg01 = TREE_OPERAND (arg0, 1);
5316 tree arg1_type = TREE_TYPE (arg1);
5317 tree tem;
5319 STRIP_NOPS (arg1);
5320 STRIP_NOPS (arg2);
5322 /* If we have A op 0 ? A : -A, consider applying the following
5323 transformations:
5325 A == 0? A : -A same as -A
5326 A != 0? A : -A same as A
5327 A >= 0? A : -A same as abs (A)
5328 A > 0? A : -A same as abs (A)
5329 A <= 0? A : -A same as -abs (A)
5330 A < 0? A : -A same as -abs (A)
5332 None of these transformations work for modes with signed
5333 zeros. If A is +/-0, the first two transformations will
5334 change the sign of the result (from +0 to -0, or vice
5335 versa). The last four will fix the sign of the result,
5336 even though the original expressions could be positive or
5337 negative, depending on the sign of A.
5339 Note that all these transformations are correct if A is
5340 NaN, since the two alternatives (A and -A) are also NaNs. */
5341 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5342 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5343 ? real_zerop (arg01)
5344 : integer_zerop (arg01))
5345 && ((TREE_CODE (arg2) == NEGATE_EXPR
5346 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5347 /* In the case that A is of the form X-Y, '-A' (arg2) may
5348 have already been folded to Y-X, check for that. */
5349 || (TREE_CODE (arg1) == MINUS_EXPR
5350 && TREE_CODE (arg2) == MINUS_EXPR
5351 && operand_equal_p (TREE_OPERAND (arg1, 0),
5352 TREE_OPERAND (arg2, 1), 0)
5353 && operand_equal_p (TREE_OPERAND (arg1, 1),
5354 TREE_OPERAND (arg2, 0), 0))))
5355 switch (comp_code)
5357 case EQ_EXPR:
5358 case UNEQ_EXPR:
5359 tem = fold_convert_loc (loc, arg1_type, arg1);
5360 return pedantic_non_lvalue_loc (loc,
5361 fold_convert_loc (loc, type,
5362 negate_expr (tem)));
5363 case NE_EXPR:
5364 case LTGT_EXPR:
5365 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5366 case UNGE_EXPR:
5367 case UNGT_EXPR:
5368 if (flag_trapping_math)
5369 break;
5370 /* Fall through. */
5371 case GE_EXPR:
5372 case GT_EXPR:
5373 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5374 arg1 = fold_convert_loc (loc, signed_type_for
5375 (TREE_TYPE (arg1)), arg1);
5376 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5377 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5378 case UNLE_EXPR:
5379 case UNLT_EXPR:
5380 if (flag_trapping_math)
5381 break;
5382 case LE_EXPR:
5383 case LT_EXPR:
5384 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5385 arg1 = fold_convert_loc (loc, signed_type_for
5386 (TREE_TYPE (arg1)), arg1);
5387 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5388 return negate_expr (fold_convert_loc (loc, type, tem));
5389 default:
5390 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5391 break;
5394 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5395 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5396 both transformations are correct when A is NaN: A != 0
5397 is then true, and A == 0 is false. */
5399 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5400 && integer_zerop (arg01) && integer_zerop (arg2))
5402 if (comp_code == NE_EXPR)
5403 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5404 else if (comp_code == EQ_EXPR)
5405 return build_int_cst (type, 0);
5408 /* Try some transformations of A op B ? A : B.
5410 A == B? A : B same as B
5411 A != B? A : B same as A
5412 A >= B? A : B same as max (A, B)
5413 A > B? A : B same as max (B, A)
5414 A <= B? A : B same as min (A, B)
5415 A < B? A : B same as min (B, A)
5417 As above, these transformations don't work in the presence
5418 of signed zeros. For example, if A and B are zeros of
5419 opposite sign, the first two transformations will change
5420 the sign of the result. In the last four, the original
5421 expressions give different results for (A=+0, B=-0) and
5422 (A=-0, B=+0), but the transformed expressions do not.
5424 The first two transformations are correct if either A or B
5425 is a NaN. In the first transformation, the condition will
5426 be false, and B will indeed be chosen. In the case of the
5427 second transformation, the condition A != B will be true,
5428 and A will be chosen.
5430 The conversions to max() and min() are not correct if B is
5431 a number and A is not. The conditions in the original
5432 expressions will be false, so all four give B. The min()
5433 and max() versions would give a NaN instead. */
5434 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5435 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5436 /* Avoid these transformations if the COND_EXPR may be used
5437 as an lvalue in the C++ front-end. PR c++/19199. */
5438 && (in_gimple_form
5439 || (strcmp (lang_hooks.name, "GNU C++") != 0
5440 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5441 || ! maybe_lvalue_p (arg1)
5442 || ! maybe_lvalue_p (arg2)))
5444 tree comp_op0 = arg00;
5445 tree comp_op1 = arg01;
5446 tree comp_type = TREE_TYPE (comp_op0);
5448 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5449 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5451 comp_type = type;
5452 comp_op0 = arg1;
5453 comp_op1 = arg2;
5456 switch (comp_code)
5458 case EQ_EXPR:
5459 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5460 case NE_EXPR:
5461 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5462 case LE_EXPR:
5463 case LT_EXPR:
5464 case UNLE_EXPR:
5465 case UNLT_EXPR:
5466 /* In C++ a ?: expression can be an lvalue, so put the
5467 operand which will be used if they are equal first
5468 so that we can convert this back to the
5469 corresponding COND_EXPR. */
5470 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5472 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5473 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5474 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5475 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5476 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5477 comp_op1, comp_op0);
5478 return pedantic_non_lvalue_loc (loc,
5479 fold_convert_loc (loc, type, tem));
5481 break;
5482 case GE_EXPR:
5483 case GT_EXPR:
5484 case UNGE_EXPR:
5485 case UNGT_EXPR:
5486 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5488 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5489 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5490 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5491 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5492 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5493 comp_op1, comp_op0);
5494 return pedantic_non_lvalue_loc (loc,
5495 fold_convert_loc (loc, type, tem));
5497 break;
5498 case UNEQ_EXPR:
5499 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5500 return pedantic_non_lvalue_loc (loc,
5501 fold_convert_loc (loc, type, arg2));
5502 break;
5503 case LTGT_EXPR:
5504 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5505 return pedantic_non_lvalue_loc (loc,
5506 fold_convert_loc (loc, type, arg1));
5507 break;
5508 default:
5509 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5510 break;
5514 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5515 we might still be able to simplify this. For example,
5516 if C1 is one less or one more than C2, this might have started
5517 out as a MIN or MAX and been transformed by this function.
5518 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5520 if (INTEGRAL_TYPE_P (type)
5521 && TREE_CODE (arg01) == INTEGER_CST
5522 && TREE_CODE (arg2) == INTEGER_CST)
5523 switch (comp_code)
5525 case EQ_EXPR:
5526 if (TREE_CODE (arg1) == INTEGER_CST)
5527 break;
5528 /* We can replace A with C1 in this case. */
5529 arg1 = fold_convert_loc (loc, type, arg01);
5530 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5532 case LT_EXPR:
5533 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5534 MIN_EXPR, to preserve the signedness of the comparison. */
5535 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5536 OEP_ONLY_CONST)
5537 && operand_equal_p (arg01,
5538 const_binop (PLUS_EXPR, arg2,
5539 build_int_cst (type, 1), 0),
5540 OEP_ONLY_CONST))
5542 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5543 fold_convert_loc (loc, TREE_TYPE (arg00),
5544 arg2));
5545 return pedantic_non_lvalue_loc (loc,
5546 fold_convert_loc (loc, type, tem));
5548 break;
5550 case LE_EXPR:
5551 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5552 as above. */
5553 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5554 OEP_ONLY_CONST)
5555 && operand_equal_p (arg01,
5556 const_binop (MINUS_EXPR, arg2,
5557 build_int_cst (type, 1), 0),
5558 OEP_ONLY_CONST))
5560 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5561 fold_convert_loc (loc, TREE_TYPE (arg00),
5562 arg2));
5563 return pedantic_non_lvalue_loc (loc,
5564 fold_convert_loc (loc, type, tem));
5566 break;
5568 case GT_EXPR:
5569 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5570 MAX_EXPR, to preserve the signedness of the comparison. */
5571 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5572 OEP_ONLY_CONST)
5573 && operand_equal_p (arg01,
5574 const_binop (MINUS_EXPR, arg2,
5575 build_int_cst (type, 1), 0),
5576 OEP_ONLY_CONST))
5578 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5579 fold_convert_loc (loc, TREE_TYPE (arg00),
5580 arg2));
5581 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5583 break;
5585 case GE_EXPR:
5586 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5587 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5588 OEP_ONLY_CONST)
5589 && operand_equal_p (arg01,
5590 const_binop (PLUS_EXPR, arg2,
5591 build_int_cst (type, 1), 0),
5592 OEP_ONLY_CONST))
5594 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5595 fold_convert_loc (loc, TREE_TYPE (arg00),
5596 arg2));
5597 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5599 break;
5600 case NE_EXPR:
5601 break;
5602 default:
5603 gcc_unreachable ();
5606 return NULL_TREE;
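/* An illustrative sketch, not part of the original file, of the
   A op B ? A : B rewrites tabulated above, on integer operands where
   neither NaNs nor signed zeros can interfere: the conditional and
   the MIN_EXPR form always agree.  */
static int
cond_expr_min_example (int a, int b)
{
  int as_cond = a < b ? a : b;
  int as_min = a <= b ? a : b;   /* the MIN_EXPR the folder builds */
  return as_cond == as_min;      /* always 1 */
}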
5611 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5612 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5613 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5614 false) >= 2)
5615 #endif
5617 /* EXP is some logical combination of boolean tests. See if we can
5618 merge it into some range test. Return the new tree if so. */
5620 static tree
5621 fold_range_test (location_t loc, enum tree_code code, tree type,
5622 tree op0, tree op1)
5624 int or_op = (code == TRUTH_ORIF_EXPR
5625 || code == TRUTH_OR_EXPR);
5626 int in0_p, in1_p, in_p;
5627 tree low0, low1, low, high0, high1, high;
5628 bool strict_overflow_p = false;
5629 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5630 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5631 tree tem;
5632 const char * const warnmsg = G_("assuming signed overflow does not occur "
5633 "when simplifying range test");
5635 /* If this is an OR operation, invert both sides; we will invert
5636 again at the end. */
5637 if (or_op)
5638 in0_p = ! in0_p, in1_p = ! in1_p;
5640 /* If both expressions are the same, if we can merge the ranges, and we
5641 can build the range test, return it or it inverted. If one of the
5642 ranges is always true or always false, consider it to be the same
5643 expression as the other. */
5644 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5645 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5646 in1_p, low1, high1)
5647 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5648 lhs != 0 ? lhs
5649 : rhs != 0 ? rhs : integer_zero_node,
5650 in_p, low, high))))
5652 if (strict_overflow_p)
5653 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5654 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5657 /* On machines where branches are expensive, if this is a
5658 short-circuited branch and the underlying object on both sides
5659 is the same, make a non-short-circuit operation. */
5660 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5661 && lhs != 0 && rhs != 0
5662 && (code == TRUTH_ANDIF_EXPR
5663 || code == TRUTH_ORIF_EXPR)
5664 && operand_equal_p (lhs, rhs, 0))
5666 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5667 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5668 which cases we can't do this. */
5669 if (simple_operand_p (lhs))
5671 tem = build2 (code == TRUTH_ANDIF_EXPR
5672 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5673 type, op0, op1);
5674 SET_EXPR_LOCATION (tem, loc);
5675 return tem;
5678 else if (lang_hooks.decls.global_bindings_p () == 0
5679 && ! CONTAINS_PLACEHOLDER_P (lhs))
5681 tree common = save_expr (lhs);
5683 if (0 != (lhs = build_range_check (loc, type, common,
5684 or_op ? ! in0_p : in0_p,
5685 low0, high0))
5686 && (0 != (rhs = build_range_check (loc, type, common,
5687 or_op ? ! in1_p : in1_p,
5688 low1, high1))))
5690 if (strict_overflow_p)
5691 fold_overflow_warning (warnmsg,
5692 WARN_STRICT_OVERFLOW_COMPARISON);
5693 tem = build2 (code == TRUTH_ANDIF_EXPR
5694 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5695 type, lhs, rhs);
5696 SET_EXPR_LOCATION (tem, loc);
5697 return tem;
5702 return 0;
5705 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5706 bit value. Arrange things so the extra bits will be set to zero if and
5707 only if C is sign-extended to its full width. If MASK is nonzero,
5708 it is an INTEGER_CST that should be AND'ed with the extra bits. */
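/* A minimal standalone sketch of the shifts below, assuming an 8-bit
   mode and a 4-bit field (illustrative C, not GCC internals):

     int8_t t = (int8_t) (((c >> 3) & 1) << 7) >> 3;
     int8_t result = c ^ t;

   T has bits 4..7 set exactly when the sign bit of C (bit 3) is set, so
   the extra bits of RESULT are zero iff C was already sign-extended to
   the full mode width.  */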
5710 static tree
5711 unextend (tree c, int p, int unsignedp, tree mask)
5713 tree type = TREE_TYPE (c);
5714 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5715 tree temp;
5717 if (p == modesize || unsignedp)
5718 return c;
5720 /* We work by getting just the sign bit into the low-order bit, then
5721 into the high-order bit, then sign-extend. We then XOR that value
5722 with C. */
5723 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5724 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5726 /* We must use a signed type in order to get an arithmetic right shift.
5727 However, we must also avoid introducing accidental overflows, so that
5728 a subsequent call to integer_zerop will work. Hence we must
5729 do the type conversion here. At this point, the constant is either
5730 zero or one, and the conversion to a signed type can never overflow.
5731 We could get an overflow if this conversion is done anywhere else. */
5732 if (TYPE_UNSIGNED (type))
5733 temp = fold_convert (signed_type_for (type), temp);
5735 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5736 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5737 if (mask != 0)
5738 temp = const_binop (BIT_AND_EXPR, temp,
5739 fold_convert (TREE_TYPE (c), mask), 0);
5741 /* If necessary, convert the type back to match the type of C. */
5742 if (TYPE_UNSIGNED (type))
5743 temp = fold_convert (type, temp);
5745 return fold_convert (type,
5746 const_binop (BIT_XOR_EXPR, c, temp, 0));
5749 /* Find ways of folding logical expressions of LHS and RHS:
5750 Try to merge two comparisons to the same innermost item.
5751 Look for range tests like "ch >= '0' && ch <= '9'".
5752 Look for combinations of simple terms on machines with expensive branches
5753 and evaluate the RHS unconditionally.
5755 For example, if we have p->a == 2 && p->b == 4 and we can make an
5756 object large enough to span both A and B, we can do this with a comparison
5757 against the object ANDed with a mask.
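/* Illustrative sketch, assuming P->A and P->B are adjacent bit-fields
   fetched in one word (field names hypothetical):

     p->a == 2 && p->b == 4

   can become a single masked comparison, conceptually

     (word & mask) == cst

   where MASK covers both fields and CST holds 2 and 4 shifted into
   their bit positions.  */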
5759 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5760 operations to do this with one comparison.
5762 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5763 function and the one above.
5765 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5766 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5768 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5769 two operands.
5771 We return the simplified tree or 0 if no optimization is possible. */
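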
5773 static tree
5774 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5775 tree lhs, tree rhs)
5777 /* If this is the "or" of two comparisons, we can do something if
5778 the comparisons are NE_EXPR. If this is the "and", we can do something
5779 if the comparisons are EQ_EXPR. I.e.,
5780 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5782 WANTED_CODE is this operation code. For single bit fields, we can
5783 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5784 comparison for one-bit fields. */
5786 enum tree_code wanted_code;
5787 enum tree_code lcode, rcode;
5788 tree ll_arg, lr_arg, rl_arg, rr_arg;
5789 tree ll_inner, lr_inner, rl_inner, rr_inner;
5790 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5791 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5792 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5793 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5794 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5795 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5796 enum machine_mode lnmode, rnmode;
5797 tree ll_mask, lr_mask, rl_mask, rr_mask;
5798 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5799 tree l_const, r_const;
5800 tree lntype, rntype, result;
5801 HOST_WIDE_INT first_bit, end_bit;
5802 int volatilep;
5803 tree orig_lhs = lhs, orig_rhs = rhs;
5804 enum tree_code orig_code = code;
5806 /* Start by getting the comparison codes. Fail if anything is volatile.
5807 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5808 it were surrounded with a NE_EXPR. */
5810 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5811 return 0;
5813 lcode = TREE_CODE (lhs);
5814 rcode = TREE_CODE (rhs);
5816 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5818 lhs = build2 (NE_EXPR, truth_type, lhs,
5819 build_int_cst (TREE_TYPE (lhs), 0));
5820 lcode = NE_EXPR;
5823 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5825 rhs = build2 (NE_EXPR, truth_type, rhs,
5826 build_int_cst (TREE_TYPE (rhs), 0));
5827 rcode = NE_EXPR;
5830 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5831 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5832 return 0;
5834 ll_arg = TREE_OPERAND (lhs, 0);
5835 lr_arg = TREE_OPERAND (lhs, 1);
5836 rl_arg = TREE_OPERAND (rhs, 0);
5837 rr_arg = TREE_OPERAND (rhs, 1);
5839 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5840 if (simple_operand_p (ll_arg)
5841 && simple_operand_p (lr_arg))
5843 tree result;
5844 if (operand_equal_p (ll_arg, rl_arg, 0)
5845 && operand_equal_p (lr_arg, rr_arg, 0))
5847 result = combine_comparisons (loc, code, lcode, rcode,
5848 truth_type, ll_arg, lr_arg);
5849 if (result)
5850 return result;
5852 else if (operand_equal_p (ll_arg, rr_arg, 0)
5853 && operand_equal_p (lr_arg, rl_arg, 0))
5855 result = combine_comparisons (loc, code, lcode,
5856 swap_tree_comparison (rcode),
5857 truth_type, ll_arg, lr_arg);
5858 if (result)
5859 return result;
5863 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5864 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5866 /* If the RHS can be evaluated unconditionally and its operands are
5867 simple, it wins to evaluate the RHS unconditionally on machines
5868 with expensive branches. In this case, this isn't a comparison
5869 that can be merged. Avoid doing this if the RHS is a floating-point
5870 comparison since those can trap. */
5872 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5873 false) >= 2
5874 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5875 && simple_operand_p (rl_arg)
5876 && simple_operand_p (rr_arg))
5878 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5879 if (code == TRUTH_OR_EXPR
5880 && lcode == NE_EXPR && integer_zerop (lr_arg)
5881 && rcode == NE_EXPR && integer_zerop (rr_arg)
5882 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5883 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5885 result = build2 (NE_EXPR, truth_type,
5886 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5887 ll_arg, rl_arg),
5888 build_int_cst (TREE_TYPE (ll_arg), 0));
5889 goto fold_truthop_exit;
5892 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5893 if (code == TRUTH_AND_EXPR
5894 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5895 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5896 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5897 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5899 result = build2 (EQ_EXPR, truth_type,
5900 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5901 ll_arg, rl_arg),
5902 build_int_cst (TREE_TYPE (ll_arg), 0));
5903 goto fold_truthop_exit;
5906 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5908 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5910 result = build2 (code, truth_type, lhs, rhs);
5911 goto fold_truthop_exit;
5913 return NULL_TREE;
5917 /* See if the comparisons can be merged. Then get all the parameters for
5918 each side. */
5920 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5921 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5922 return 0;
5924 volatilep = 0;
5925 ll_inner = decode_field_reference (loc, ll_arg,
5926 &ll_bitsize, &ll_bitpos, &ll_mode,
5927 &ll_unsignedp, &volatilep, &ll_mask,
5928 &ll_and_mask);
5929 lr_inner = decode_field_reference (loc, lr_arg,
5930 &lr_bitsize, &lr_bitpos, &lr_mode,
5931 &lr_unsignedp, &volatilep, &lr_mask,
5932 &lr_and_mask);
5933 rl_inner = decode_field_reference (loc, rl_arg,
5934 &rl_bitsize, &rl_bitpos, &rl_mode,
5935 &rl_unsignedp, &volatilep, &rl_mask,
5936 &rl_and_mask);
5937 rr_inner = decode_field_reference (loc, rr_arg,
5938 &rr_bitsize, &rr_bitpos, &rr_mode,
5939 &rr_unsignedp, &volatilep, &rr_mask,
5940 &rr_and_mask);
5942 /* The inner operation on the lhs of each comparison must be the
5943 same if we are to be able to do anything.
5944 Then see if we have constants. If not, the same must be true for
5945 the rhs's. */
5946 if (volatilep || ll_inner == 0 || rl_inner == 0
5947 || ! operand_equal_p (ll_inner, rl_inner, 0))
5948 return 0;
5950 if (TREE_CODE (lr_arg) == INTEGER_CST
5951 && TREE_CODE (rr_arg) == INTEGER_CST)
5952 l_const = lr_arg, r_const = rr_arg;
5953 else if (lr_inner == 0 || rr_inner == 0
5954 || ! operand_equal_p (lr_inner, rr_inner, 0))
5955 return 0;
5956 else
5957 l_const = r_const = 0;
5959 /* If either comparison code is not correct for our logical operation,
5960 fail. However, we can convert a one-bit comparison against zero into
5961 the opposite comparison against that bit being set in the field. */
5963 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5964 if (lcode != wanted_code)
5966 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5968 /* Make the left operand unsigned, since we are only interested
5969 in the value of one bit. Otherwise we are doing the wrong
5970 thing below. */
5971 ll_unsignedp = 1;
5972 l_const = ll_mask;
5974 else
5975 return 0;
5978 /* This is analogous to the code for l_const above. */
5979 if (rcode != wanted_code)
5981 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5983 rl_unsignedp = 1;
5984 r_const = rl_mask;
5986 else
5987 return 0;
5990 /* See if we can find a mode that contains both fields being compared on
5991 the left. If we can't, fail. Otherwise, update all constants and masks
5992 to be relative to a field of that size. */
5993 first_bit = MIN (ll_bitpos, rl_bitpos);
5994 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5995 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5996 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5997 volatilep);
5998 if (lnmode == VOIDmode)
5999 return 0;
6001 lnbitsize = GET_MODE_BITSIZE (lnmode);
6002 lnbitpos = first_bit & ~ (lnbitsize - 1);
6003 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6004 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6006 if (BYTES_BIG_ENDIAN)
6008 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6009 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6012 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6013 size_int (xll_bitpos), 0);
6014 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6015 size_int (xrl_bitpos), 0);
6017 if (l_const)
6019 l_const = fold_convert_loc (loc, lntype, l_const);
6020 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6021 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
6022 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6023 fold_build1_loc (loc, BIT_NOT_EXPR,
6024 lntype, ll_mask),
6025 0)))
6027 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6029 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6032 if (r_const)
6034 r_const = fold_convert_loc (loc, lntype, r_const);
6035 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6036 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
6037 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6038 fold_build1_loc (loc, BIT_NOT_EXPR,
6039 lntype, rl_mask),
6040 0)))
6042 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6044 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6048 /* If the right sides are not constant, do the same for them. Also,
6049 disallow this optimization if a size or signedness mismatch occurs
6050 between the left and right sides. */
6051 if (l_const == 0)
6053 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6054 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6055 /* Make sure the two fields on the right
6056 correspond to the left without being swapped. */
6057 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6058 return 0;
6060 first_bit = MIN (lr_bitpos, rr_bitpos);
6061 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6062 rnmode = get_best_mode (end_bit - first_bit, first_bit,
6063 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
6064 volatilep);
6065 if (rnmode == VOIDmode)
6066 return 0;
6068 rnbitsize = GET_MODE_BITSIZE (rnmode);
6069 rnbitpos = first_bit & ~ (rnbitsize - 1);
6070 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6071 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6073 if (BYTES_BIG_ENDIAN)
6075 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6076 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6079 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6080 rntype, lr_mask),
6081 size_int (xlr_bitpos), 0);
6082 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6083 rntype, rr_mask),
6084 size_int (xrr_bitpos), 0);
6086 /* Make a mask that corresponds to both fields being compared.
6087 Do this for both items being compared. If the operands are the
6088 same size and the bits being compared are in the same position
6089 then we can do this by masking both and comparing the masked
6090 results. */
6091 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6092 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6093 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6095 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6096 ll_unsignedp || rl_unsignedp);
6097 if (! all_ones_mask_p (ll_mask, lnbitsize))
6098 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6100 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6101 lr_unsignedp || rr_unsignedp);
6102 if (! all_ones_mask_p (lr_mask, rnbitsize))
6103 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6105 result = build2 (wanted_code, truth_type, lhs, rhs);
6106 goto fold_truthop_exit;
6109 /* There is still another way we can do something: If both pairs of
6110 fields being compared are adjacent, we may be able to make a wider
6111 field containing them both.
6113 Note that we still must mask the lhs/rhs expressions. Furthermore,
6114 the mask must be shifted to account for the shift done by
6115 make_bit_field_ref. */
6116 if ((ll_bitsize + ll_bitpos == rl_bitpos
6117 && lr_bitsize + lr_bitpos == rr_bitpos)
6118 || (ll_bitpos == rl_bitpos + rl_bitsize
6119 && lr_bitpos == rr_bitpos + rr_bitsize))
6121 tree type;
6123 lhs = make_bit_field_ref (loc, ll_inner, lntype,
6124 ll_bitsize + rl_bitsize,
6125 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6126 rhs = make_bit_field_ref (loc, lr_inner, rntype,
6127 lr_bitsize + rr_bitsize,
6128 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6130 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6131 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6132 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6133 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6135 /* Convert to the smaller type before masking out unwanted bits. */
6136 type = lntype;
6137 if (lntype != rntype)
6139 if (lnbitsize > rnbitsize)
6141 lhs = fold_convert_loc (loc, rntype, lhs);
6142 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6143 type = rntype;
6145 else if (lnbitsize < rnbitsize)
6147 rhs = fold_convert_loc (loc, lntype, rhs);
6148 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6149 type = lntype;
6153 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6154 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6156 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6157 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6159 result = build2 (wanted_code, truth_type, lhs, rhs);
6160 goto fold_truthop_exit;
6163 return 0;
6166 /* Handle the case of comparisons with constants. If there is something in
6167 common between the masks, those bits of the constants must be the same.
6168 If not, the condition is always false. Test for this to avoid generating
6169 incorrect code below. */
6170 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6171 if (! integer_zerop (result)
6172 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6173 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6175 if (wanted_code == NE_EXPR)
6177 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6178 return constant_boolean_node (true, truth_type);
6180 else
6182 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6183 return constant_boolean_node (false, truth_type);
6187 /* Construct the expression we will return. First get the component
6188 reference we will make. Unless the mask is all ones the width of
6189 that field, perform the mask operation. Then compare with the
6190 merged constant. */
6191 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6192 ll_unsignedp || rl_unsignedp);
6194 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6195 if (! all_ones_mask_p (ll_mask, lnbitsize))
6197 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6198 SET_EXPR_LOCATION (result, loc);
6201 result = build2 (wanted_code, truth_type, result,
6202 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
6204 fold_truthop_exit:
6205 SET_EXPR_LOCATION (result, loc);
6206 return result;
6209 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
6210 constant. */
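/* For instance, MAX (x, 0) > 5 folds to x > 5, while MIN (x, 0) > 5 is
   always false; only EQ_EXPR and GT_EXPR are handled directly below,
   and the remaining comparison codes are reduced to those two by
   inversion and recursion.  */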
6212 static tree
6213 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6214 tree op0, tree op1)
6216 tree arg0 = op0;
6217 enum tree_code op_code;
6218 tree comp_const;
6219 tree minmax_const;
6220 int consts_equal, consts_lt;
6221 tree inner;
6223 STRIP_SIGN_NOPS (arg0);
6225 op_code = TREE_CODE (arg0);
6226 minmax_const = TREE_OPERAND (arg0, 1);
6227 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6228 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6229 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6230 inner = TREE_OPERAND (arg0, 0);
6232 /* If something does not permit us to optimize, return the original tree. */
6233 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6234 || TREE_CODE (comp_const) != INTEGER_CST
6235 || TREE_OVERFLOW (comp_const)
6236 || TREE_CODE (minmax_const) != INTEGER_CST
6237 || TREE_OVERFLOW (minmax_const))
6238 return NULL_TREE;
6240 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6241 and GT_EXPR, doing the rest with recursive calls using logical
6242 simplifications. */
6243 switch (code)
6245 case NE_EXPR: case LT_EXPR: case LE_EXPR:
6247 tree tem
6248 = optimize_minmax_comparison (loc,
6249 invert_tree_comparison (code, false),
6250 type, op0, op1);
6251 if (tem)
6252 return invert_truthvalue_loc (loc, tem);
6253 return NULL_TREE;
6256 case GE_EXPR:
6257 return
6258 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6259 optimize_minmax_comparison
6260 (loc, EQ_EXPR, type, arg0, comp_const),
6261 optimize_minmax_comparison
6262 (loc, GT_EXPR, type, arg0, comp_const));
6264 case EQ_EXPR:
6265 if (op_code == MAX_EXPR && consts_equal)
6266 /* MAX (X, 0) == 0 -> X <= 0 */
6267 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6269 else if (op_code == MAX_EXPR && consts_lt)
6270 /* MAX (X, 0) == 5 -> X == 5 */
6271 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6273 else if (op_code == MAX_EXPR)
6274 /* MAX (X, 0) == -1 -> false */
6275 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6277 else if (consts_equal)
6278 /* MIN (X, 0) == 0 -> X >= 0 */
6279 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6281 else if (consts_lt)
6282 /* MIN (X, 0) == 5 -> false */
6283 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6285 else
6286 /* MIN (X, 0) == -1 -> X == -1 */
6287 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6289 case GT_EXPR:
6290 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6291 /* MAX (X, 0) > 0 -> X > 0
6292 MAX (X, 0) > 5 -> X > 5 */
6293 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6295 else if (op_code == MAX_EXPR)
6296 /* MAX (X, 0) > -1 -> true */
6297 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6299 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6300 /* MIN (X, 0) > 0 -> false
6301 MIN (X, 0) > 5 -> false */
6302 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6304 else
6305 /* MIN (X, 0) > -1 -> X > -1 */
6306 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6308 default:
6309 return NULL_TREE;
6313 /* T is an integer expression that is being multiplied, divided, or taken a
6314 modulus (CODE says which and what kind of divide or modulus) by a
6315 constant C. See if we can eliminate that operation by folding it with
6316 other operations already in T. WIDE_TYPE, if non-null, is a type that
6317 should be used for the computation if wider than our type.
6319 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6320 (X * 2) + (Y * 4). We must, however, be assured that either the original
6321 expression would not overflow or that overflow is undefined for the type
6322 in the language in question.
6324 If we return a non-null expression, it is an equivalent form of the
6325 original computation, but need not be in the original type.
6327 We set *STRICT_OVERFLOW_P to true if the return value depends on
6328 signed overflow being undefined. Otherwise we do not change
6329 *STRICT_OVERFLOW_P. */
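/* Worked instance of the example above: in ((X * 8) + (Y * 16)) / 4,
   both multipliers (8 and 16) are multiples of 4, so each term is
   rewritten in place, yielding (X * 2) + (Y * 4) with no division --
   valid only if the original sum cannot overflow or overflow is
   undefined for the type.  */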
6331 static tree
6332 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6333 bool *strict_overflow_p)
6335 /* To avoid exponential search depth, refuse to allow recursion past
6336 three levels. Beyond that (1) it's highly unlikely that we'll find
6337 something interesting and (2) we've probably processed it before
6338 when we built the inner expression. */
6340 static int depth;
6341 tree ret;
6343 if (depth > 3)
6344 return NULL;
6346 depth++;
6347 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6348 depth--;
6350 return ret;
6353 static tree
6354 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6355 bool *strict_overflow_p)
6357 tree type = TREE_TYPE (t);
6358 enum tree_code tcode = TREE_CODE (t);
6359 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6360 > GET_MODE_SIZE (TYPE_MODE (type)))
6361 ? wide_type : type);
6362 tree t1, t2;
6363 int same_p = tcode == code;
6364 tree op0 = NULL_TREE, op1 = NULL_TREE;
6365 bool sub_strict_overflow_p;
6367 /* Don't deal with constants of zero here; they confuse the code below. */
6368 if (integer_zerop (c))
6369 return NULL_TREE;
6371 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6372 op0 = TREE_OPERAND (t, 0);
6374 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6375 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6377 /* Note that we need not handle conditional operations here since fold
6378 already handles those cases. So just do arithmetic here. */
6379 switch (tcode)
6381 case INTEGER_CST:
6382 /* For a constant, we can always simplify if we are a multiply
6383 or (for divide and modulus) if it is a multiple of our constant. */
6384 if (code == MULT_EXPR
6385 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6386 return const_binop (code, fold_convert (ctype, t),
6387 fold_convert (ctype, c), 0);
6388 break;
6390 CASE_CONVERT: case NON_LVALUE_EXPR:
6391 /* If op0 is an expression ... */
6392 if ((COMPARISON_CLASS_P (op0)
6393 || UNARY_CLASS_P (op0)
6394 || BINARY_CLASS_P (op0)
6395 || VL_EXP_CLASS_P (op0)
6396 || EXPRESSION_CLASS_P (op0))
6397 /* ... and has wrapping overflow, and its type is smaller
6398 than ctype, then we cannot pass through as widening. */
6399 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6400 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6401 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6402 && (TYPE_PRECISION (ctype)
6403 > TYPE_PRECISION (TREE_TYPE (op0))))
6404 /* ... or this is a truncation (t is narrower than op0),
6405 then we cannot pass through this narrowing. */
6406 || (TYPE_PRECISION (type)
6407 < TYPE_PRECISION (TREE_TYPE (op0)))
6408 /* ... or signedness changes for division or modulus,
6409 then we cannot pass through this conversion. */
6410 || (code != MULT_EXPR
6411 && (TYPE_UNSIGNED (ctype)
6412 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6413 /* ... or has undefined overflow while the converted to
6414 type has not, we cannot do the operation in the inner type
6415 as that would introduce undefined overflow. */
6416 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6417 && !TYPE_OVERFLOW_UNDEFINED (type))))
6418 break;
6420 /* Pass the constant down and see if we can make a simplification. If
6421 we can, replace this expression with the inner simplification for
6422 possible later conversion to our or some other type. */
6423 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6424 && TREE_CODE (t2) == INTEGER_CST
6425 && !TREE_OVERFLOW (t2)
6426 && (0 != (t1 = extract_muldiv (op0, t2, code,
6427 code == MULT_EXPR
6428 ? ctype : NULL_TREE,
6429 strict_overflow_p))))
6430 return t1;
6431 break;
6433 case ABS_EXPR:
6434 /* If widening the type changes it from signed to unsigned, then we
6435 must avoid building ABS_EXPR itself as unsigned. */
6436 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6438 tree cstype = (*signed_type_for) (ctype);
6439 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6440 != 0)
6442 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6443 return fold_convert (ctype, t1);
6445 break;
6447 /* If the constant is negative, we cannot simplify this. */
6448 if (tree_int_cst_sgn (c) == -1)
6449 break;
6450 /* FALLTHROUGH */
6451 case NEGATE_EXPR:
6452 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6453 != 0)
6454 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6455 break;
6457 case MIN_EXPR: case MAX_EXPR:
6458 /* If widening the type changes the signedness, then we can't perform
6459 this optimization as that changes the result. */
6460 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6461 break;
6463 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6464 sub_strict_overflow_p = false;
6465 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6466 &sub_strict_overflow_p)) != 0
6467 && (t2 = extract_muldiv (op1, c, code, wide_type,
6468 &sub_strict_overflow_p)) != 0)
6470 if (tree_int_cst_sgn (c) < 0)
6471 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6472 if (sub_strict_overflow_p)
6473 *strict_overflow_p = true;
6474 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6475 fold_convert (ctype, t2));
6477 break;
6479 case LSHIFT_EXPR: case RSHIFT_EXPR:
6480 /* If the second operand is constant, this is a multiplication
6481 or floor division by a power of two, so we can treat it that
6482 way unless the multiplier or divisor overflows. Signed
6483 left-shift overflow is implementation-defined rather than
6484 undefined in C90, so do not convert signed left shift into
6485 multiplication. */
6486 if (TREE_CODE (op1) == INTEGER_CST
6487 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6488 /* const_binop may not detect overflow correctly,
6489 so check for it explicitly here. */
6490 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6491 && TREE_INT_CST_HIGH (op1) == 0
6492 && 0 != (t1 = fold_convert (ctype,
6493 const_binop (LSHIFT_EXPR,
6494 size_one_node,
6495 op1, 0)))
6496 && !TREE_OVERFLOW (t1))
6497 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6498 ? MULT_EXPR : FLOOR_DIV_EXPR,
6499 ctype,
6500 fold_convert (ctype, op0),
6501 t1),
6502 c, code, wide_type, strict_overflow_p);
6503 break;
6505 case PLUS_EXPR: case MINUS_EXPR:
6506 /* See if we can eliminate the operation on both sides. If we can, we
6507 can return a new PLUS or MINUS. If we can't, the only remaining
6508 cases where we can do anything are if the second operand is a
6509 constant. */
6510 sub_strict_overflow_p = false;
6511 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6512 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6513 if (t1 != 0 && t2 != 0
6514 && (code == MULT_EXPR
6515 /* If not multiplication, we can only do this if both operands
6516 are divisible by c. */
6517 || (multiple_of_p (ctype, op0, c)
6518 && multiple_of_p (ctype, op1, c))))
6520 if (sub_strict_overflow_p)
6521 *strict_overflow_p = true;
6522 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6523 fold_convert (ctype, t2));
6526 /* If this was a subtraction, negate OP1 and set it to be an addition.
6527 This simplifies the logic below. */
6528 if (tcode == MINUS_EXPR)
6530 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6531 /* If OP1 was not easily negatable, the constant may be OP0. */
6532 if (TREE_CODE (op0) == INTEGER_CST)
6534 tree tem = op0;
6535 op0 = op1;
6536 op1 = tem;
6537 tem = t1;
6538 t1 = t2;
6539 t2 = tem;
6543 if (TREE_CODE (op1) != INTEGER_CST)
6544 break;
6546 /* If either OP1 or C is negative, this optimization is not safe for
6547 some of the division and remainder types while for others we need
6548 to change the code. */
6549 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6551 if (code == CEIL_DIV_EXPR)
6552 code = FLOOR_DIV_EXPR;
6553 else if (code == FLOOR_DIV_EXPR)
6554 code = CEIL_DIV_EXPR;
6555 else if (code != MULT_EXPR
6556 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6557 break;
6560 /* If it's a multiply or a division/modulus operation of a multiple
6561 of our constant, do the operation and verify it doesn't overflow. */
6562 if (code == MULT_EXPR
6563 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6565 op1 = const_binop (code, fold_convert (ctype, op1),
6566 fold_convert (ctype, c), 0);
6567 /* We allow the constant to overflow with wrapping semantics. */
6568 if (op1 == 0
6569 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6570 break;
6572 else
6573 break;
6575 /* If we have an unsigned type that is not a sizetype, we cannot widen
6576 the operation since it will change the result if the original
6577 computation overflowed. */
6578 if (TYPE_UNSIGNED (ctype)
6579 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6580 && ctype != type)
6581 break;
6583 /* If we were able to eliminate our operation from the first side,
6584 apply our operation to the second side and reform the PLUS. */
6585 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6586 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6588 /* The last case is if we are a multiply. In that case, we can
6589 apply the distributive law to commute the multiply and addition
6590 if the multiplication of the constants doesn't overflow. */
6591 if (code == MULT_EXPR)
6592 return fold_build2 (tcode, ctype,
6593 fold_build2 (code, ctype,
6594 fold_convert (ctype, op0),
6595 fold_convert (ctype, c)),
6596 op1);
6598 break;
6600 case MULT_EXPR:
6601 /* We have a special case here if we are doing something like
6602 (C * 8) % 4 since we know that's zero. */
6603 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6604 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6605 /* If the multiplication can overflow we cannot optimize this.
6606 ??? Until we can properly mark individual operations as
6607 not overflowing we need to treat sizetype special here as
6608 stor-layout relies on this optimization to make
6609 DECL_FIELD_BIT_OFFSET always a constant. */
6610 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6611 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6612 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6613 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6614 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6616 *strict_overflow_p = true;
6617 return omit_one_operand (type, integer_zero_node, op0);
6620 /* ... fall through ... */
6622 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6623 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6624 /* If we can extract our operation from the LHS, do so and return a
6625 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6626 do something only if the second operand is a constant. */
6627 if (same_p
6628 && (t1 = extract_muldiv (op0, c, code, wide_type,
6629 strict_overflow_p)) != 0)
6630 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6631 fold_convert (ctype, op1));
6632 else if (tcode == MULT_EXPR && code == MULT_EXPR
6633 && (t1 = extract_muldiv (op1, c, code, wide_type,
6634 strict_overflow_p)) != 0)
6635 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6636 fold_convert (ctype, t1));
6637 else if (TREE_CODE (op1) != INTEGER_CST)
6638 return 0;
6640 /* If these are the same operation types, we can associate them
6641 assuming no overflow. */
6642 if (tcode == code
6643 && 0 != (t1 = int_const_binop (MULT_EXPR,
6644 fold_convert (ctype, op1),
6645 fold_convert (ctype, c), 1))
6646 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6647 TREE_INT_CST_HIGH (t1),
6648 (TYPE_UNSIGNED (ctype)
6649 && tcode != MULT_EXPR) ? -1 : 1,
6650 TREE_OVERFLOW (t1)))
6651 && !TREE_OVERFLOW (t1))
6652 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6654 /* If these operations "cancel" each other, we have the main
6655 optimizations of this pass, which occur when either constant is a
6656 multiple of the other, in which case we replace this with either an
6657 operation of CODE or TCODE.
6659 If we have an unsigned type that is not a sizetype, we cannot do
6660 this since it will change the result if the original computation
6661 overflowed. */
6662 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6663 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6664 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6665 || (tcode == MULT_EXPR
6666 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6667 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6668 && code != MULT_EXPR)))
6670 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6672 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6673 *strict_overflow_p = true;
6674 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6675 fold_convert (ctype,
6676 const_binop (TRUNC_DIV_EXPR,
6677 op1, c, 0)));
6679 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6681 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6682 *strict_overflow_p = true;
6683 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6684 fold_convert (ctype,
6685 const_binop (TRUNC_DIV_EXPR,
6686 c, op1, 0)));
6689 break;
6691 default:
6692 break;
6695 return 0;
6698 /* Return a node which has the indicated constant VALUE (either 0 or
6699 1), and is of the indicated TYPE. */
6701 tree
6702 constant_boolean_node (int value, tree type)
6704 if (type == integer_type_node)
6705 return value ? integer_one_node : integer_zero_node;
6706 else if (type == boolean_type_node)
6707 return value ? boolean_true_node : boolean_false_node;
6708 else
6709 return build_int_cst (type, value);
6713 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6714 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6715 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6716 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6717 COND is the first argument to CODE; otherwise (as in the example
6718 given here), it is the second argument. TYPE is the type of the
6719 original expression. Return NULL_TREE if no simplification is
6720 possible. */
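/* A concrete instance with a constant ARG: 4 + (b ? 1 : 2) becomes
   b ? 5 : 6, each arm folding at compile time.  The TREE_CONSTANT check
   below rejects non-constant ARG, which would otherwise have to be
   wrapped in a SAVE_EXPR to avoid double evaluation.  */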
6722 static tree
6723 fold_binary_op_with_conditional_arg (location_t loc,
6724 enum tree_code code,
6725 tree type, tree op0, tree op1,
6726 tree cond, tree arg, int cond_first_p)
6728 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6729 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6730 tree test, true_value, false_value;
6731 tree lhs = NULL_TREE;
6732 tree rhs = NULL_TREE;
6734 /* This transformation is only worthwhile if we don't have to wrap
6735 arg in a SAVE_EXPR, and the operation can be simplified on at least
6736 one of the branches once it is pushed inside the COND_EXPR.
6737 if (!TREE_CONSTANT (arg))
6738 return NULL_TREE;
6740 if (TREE_CODE (cond) == COND_EXPR)
6742 test = TREE_OPERAND (cond, 0);
6743 true_value = TREE_OPERAND (cond, 1);
6744 false_value = TREE_OPERAND (cond, 2);
6745 /* If this operand throws an exception, then it does not make
6746 sense to try to perform a logical or arithmetic operation
6747 involving it. */
6748 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6749 lhs = true_value;
6750 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6751 rhs = false_value;
6753 else
6755 tree testtype = TREE_TYPE (cond);
6756 test = cond;
6757 true_value = constant_boolean_node (true, testtype);
6758 false_value = constant_boolean_node (false, testtype);
6761 arg = fold_convert_loc (loc, arg_type, arg);
6762 if (lhs == 0)
6764 true_value = fold_convert_loc (loc, cond_type, true_value);
6765 if (cond_first_p)
6766 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6767 else
6768 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6770 if (rhs == 0)
6772 false_value = fold_convert_loc (loc, cond_type, false_value);
6773 if (cond_first_p)
6774 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6775 else
6776 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6779 test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6780 return fold_convert_loc (loc, type, test);
6784 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6786 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6787 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6788 ADDEND is the same as X.
6790 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6791 and finite. The problematic cases are when X is zero, and its mode
6792 has signed zeros. In the case of rounding towards -infinity,
6793 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6794 modes, X + 0 is not the same as X because -0 + 0 is 0. */
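/* For example, when rounding towards -infinity, (+0.0) - 0.0 yields
   -0.0, so X - 0 cannot be folded to X; in the other rounding modes
   X - 0 is safe but X + 0 is not, since (-0.0) + 0.0 is +0.0.  */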
6796 bool
6797 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6799 if (!real_zerop (addend))
6800 return false;
6802 /* Don't allow the fold with -fsignaling-nans. */
6803 if (HONOR_SNANS (TYPE_MODE (type)))
6804 return false;
6806 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6807 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6808 return true;
6810 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6811 if (TREE_CODE (addend) == REAL_CST
6812 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6813 negate = !negate;
6815 /* The mode has signed zeros, and we have to honor their sign.
6816 In this situation, there is only one case we can return true for.
6817 X - 0 is the same as X unless rounding towards -infinity is
6818 supported. */
6819 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6822 /* Subroutine of fold() that checks comparisons of built-in math
6823 functions against real constants.
6825 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6826 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6827 is the type of the result and ARG0 and ARG1 are the operands of the
6828 comparison. ARG1 must be a TREE_REAL_CST.
6830 The function returns the constant folded tree if a simplification
6831 can be made, and NULL_TREE otherwise. */
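/* Sketch of the sqrt cases handled below: ignoring NaNs and with the
   square representable, sqrt(x) > 2.0 becomes x > 4.0, and
   sqrt(x) < -1.0 is simply false since sqrt never returns a negative
   value.  */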
6833 static tree
6834 fold_mathfn_compare (location_t loc,
6835 enum built_in_function fcode, enum tree_code code,
6836 tree type, tree arg0, tree arg1)
6838 REAL_VALUE_TYPE c;
6840 if (BUILTIN_SQRT_P (fcode))
6842 tree arg = CALL_EXPR_ARG (arg0, 0);
6843 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6845 c = TREE_REAL_CST (arg1);
6846 if (REAL_VALUE_NEGATIVE (c))
6848 /* sqrt(x) < y is always false, if y is negative. */
6849 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6850 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6852 /* sqrt(x) > y is always true, if y is negative and we
6853 don't care about NaNs, i.e. negative values of x. */
6854 if (code == NE_EXPR || !HONOR_NANS (mode))
6855 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6857 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6858 return fold_build2_loc (loc, GE_EXPR, type, arg,
6859 build_real (TREE_TYPE (arg), dconst0));
6861 else if (code == GT_EXPR || code == GE_EXPR)
6863 REAL_VALUE_TYPE c2;
6865 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6866 real_convert (&c2, mode, &c2);
6868 if (REAL_VALUE_ISINF (c2))
6870 /* sqrt(x) > y is x == +Inf, when y is very large. */
6871 if (HONOR_INFINITIES (mode))
6872 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6873 build_real (TREE_TYPE (arg), c2));
6875 /* sqrt(x) > y is always false, when y is very large
6876 and we don't care about infinities. */
6877 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6880 /* sqrt(x) > c is the same as x > c*c. */
6881 return fold_build2_loc (loc, code, type, arg,
6882 build_real (TREE_TYPE (arg), c2));
6884 else if (code == LT_EXPR || code == LE_EXPR)
6886 REAL_VALUE_TYPE c2;
6888 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6889 real_convert (&c2, mode, &c2);
6891 if (REAL_VALUE_ISINF (c2))
6893 /* sqrt(x) < y is always true, when y is a very large
6894 value and we don't care about NaNs or Infinities. */
6895 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6896 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6898 /* sqrt(x) < y is x != +Inf when y is very large and we
6899 don't care about NaNs. */
6900 if (! HONOR_NANS (mode))
6901 return fold_build2_loc (loc, NE_EXPR, type, arg,
6902 build_real (TREE_TYPE (arg), c2));
6904 /* sqrt(x) < y is x >= 0 when y is very large and we
6905 don't care about Infinities. */
6906 if (! HONOR_INFINITIES (mode))
6907 return fold_build2_loc (loc, GE_EXPR, type, arg,
6908 build_real (TREE_TYPE (arg), dconst0));
6910 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6911 if (lang_hooks.decls.global_bindings_p () != 0
6912 || CONTAINS_PLACEHOLDER_P (arg))
6913 return NULL_TREE;
6915 arg = save_expr (arg);
6916 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6917 fold_build2_loc (loc, GE_EXPR, type, arg,
6918 build_real (TREE_TYPE (arg),
6919 dconst0)),
6920 fold_build2_loc (loc, NE_EXPR, type, arg,
6921 build_real (TREE_TYPE (arg),
6922 c2)));
6925 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6926 if (! HONOR_NANS (mode))
6927 return fold_build2_loc (loc, code, type, arg,
6928 build_real (TREE_TYPE (arg), c2));
6930 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6931 if (lang_hooks.decls.global_bindings_p () == 0
6932 && ! CONTAINS_PLACEHOLDER_P (arg))
6934 arg = save_expr (arg);
6935 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6936 fold_build2_loc (loc, GE_EXPR, type, arg,
6937 build_real (TREE_TYPE (arg),
6938 dconst0)),
6939 fold_build2_loc (loc, code, type, arg,
6940 build_real (TREE_TYPE (arg),
6941 c2)));
6946 return NULL_TREE;
6949 /* Subroutine of fold() that optimizes comparisons against Infinities,
6950 either +Inf or -Inf.
6952 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6953 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6954 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6956 The function returns the constant folded tree if a simplification
6957 can be made, and NULL_TREE otherwise. */
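/* For example, for double, x < +Inf becomes x <= DBL_MAX and
   x >= +Inf becomes x > DBL_MAX; a comparison against -Inf is first
   turned into the mirrored comparison against +Inf by swapping the
   sense of the comparison.  */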
6959 static tree
6960 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6961 tree arg0, tree arg1)
6963 enum machine_mode mode;
6964 REAL_VALUE_TYPE max;
6965 tree temp;
6966 bool neg;
6968 mode = TYPE_MODE (TREE_TYPE (arg0));
6970 /* For negative infinity swap the sense of the comparison. */
6971 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6972 if (neg)
6973 code = swap_tree_comparison (code);
6975 switch (code)
6977 case GT_EXPR:
6978 /* x > +Inf is always false, if we ignore sNaNs. */
6979 if (HONOR_SNANS (mode))
6980 return NULL_TREE;
6981 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6983 case LE_EXPR:
6984 /* x <= +Inf is always true, if we don't care about NaNs. */
6985 if (! HONOR_NANS (mode))
6986 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6988 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6989 if (lang_hooks.decls.global_bindings_p () == 0
6990 && ! CONTAINS_PLACEHOLDER_P (arg0))
6992 arg0 = save_expr (arg0);
6993 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6995 break;
6997 case EQ_EXPR:
6998 case GE_EXPR:
6999 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
7000 real_maxval (&max, neg, mode);
7001 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7002 arg0, build_real (TREE_TYPE (arg0), max));
7004 case LT_EXPR:
7005 /* x < +Inf is always equal to x <= DBL_MAX. */
7006 real_maxval (&max, neg, mode);
7007 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7008 arg0, build_real (TREE_TYPE (arg0), max));
7010 case NE_EXPR:
7011 /* x != +Inf is always equal to !(x > DBL_MAX). */
7012 real_maxval (&max, neg, mode);
7013 if (! HONOR_NANS (mode))
7014 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7015 arg0, build_real (TREE_TYPE (arg0), max));
7017 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7018 arg0, build_real (TREE_TYPE (arg0), max));
7019 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
7021 default:
7022 break;
7025 return NULL_TREE;
7028 /* Subroutine of fold() that optimizes comparisons of a division by
7029 a nonzero integer constant against an integer constant, i.e.
7030 X/C1 op C2.
7032 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7033 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
7034 are the operands of the comparison. ARG1 must be an INTEGER_CST.
7036 The function returns the constant folded tree if a simplification
7037 can be made, and NULL_TREE otherwise. */
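/* Worked instance: for unsigned X, X / 3 == 2 holds exactly for X in
   [6, 8], so the EQ_EXPR case below builds a single range check from
   LO = 6 and HI = 8; the TREE_OVERFLOW tests guard boundary constants
   that wrapped while being computed.  */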
7039 static tree
7040 fold_div_compare (location_t loc,
7041 enum tree_code code, tree type, tree arg0, tree arg1)
7043 tree prod, tmp, hi, lo;
7044 tree arg00 = TREE_OPERAND (arg0, 0);
7045 tree arg01 = TREE_OPERAND (arg0, 1);
7046 unsigned HOST_WIDE_INT lpart;
7047 HOST_WIDE_INT hpart;
7048 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
7049 bool neg_overflow;
7050 int overflow;
7052 /* We have to do this the hard way to detect unsigned overflow.
7053 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
7054 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
7055 TREE_INT_CST_HIGH (arg01),
7056 TREE_INT_CST_LOW (arg1),
7057 TREE_INT_CST_HIGH (arg1),
7058 &lpart, &hpart, unsigned_p);
7059 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7060 -1, overflow);
7061 neg_overflow = false;
7063 if (unsigned_p)
7065 tmp = int_const_binop (MINUS_EXPR, arg01,
7066 build_int_cst (TREE_TYPE (arg01), 1), 0);
7067 lo = prod;
7069 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
7070 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
7071 TREE_INT_CST_HIGH (prod),
7072 TREE_INT_CST_LOW (tmp),
7073 TREE_INT_CST_HIGH (tmp),
7074 &lpart, &hpart, unsigned_p);
7075 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7076 -1, overflow | TREE_OVERFLOW (prod));
7078 else if (tree_int_cst_sgn (arg01) >= 0)
7080 tmp = int_const_binop (MINUS_EXPR, arg01,
7081 build_int_cst (TREE_TYPE (arg01), 1), 0);
7082 switch (tree_int_cst_sgn (arg1))
7084 case -1:
7085 neg_overflow = true;
7086 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7087 hi = prod;
7088 break;
7090 case 0:
7091 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
7092 hi = tmp;
7093 break;
7095 case 1:
7096 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7097 lo = prod;
7098 break;
7100 default:
7101 gcc_unreachable ();
7104 else
7106 /* A negative divisor reverses the relational operators. */
7107 code = swap_tree_comparison (code);
7109 tmp = int_const_binop (PLUS_EXPR, arg01,
7110 build_int_cst (TREE_TYPE (arg01), 1), 0);
7111 switch (tree_int_cst_sgn (arg1))
7113 case -1:
7114 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7115 lo = prod;
7116 break;
7118 case 0:
7119 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7120 lo = tmp;
7121 break;
7123 case 1:
7124 neg_overflow = true;
7125 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7126 hi = prod;
7127 break;
7129 default:
7130 gcc_unreachable ();
7134 switch (code)
7136 case EQ_EXPR:
7137 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7138 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7139 if (TREE_OVERFLOW (hi))
7140 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7141 if (TREE_OVERFLOW (lo))
7142 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7143 return build_range_check (loc, type, arg00, 1, lo, hi);
7145 case NE_EXPR:
7146 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7147 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7148 if (TREE_OVERFLOW (hi))
7149 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7150 if (TREE_OVERFLOW (lo))
7151 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7152 return build_range_check (loc, type, arg00, 0, lo, hi);
7154 case LT_EXPR:
7155 if (TREE_OVERFLOW (lo))
7157 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7158 return omit_one_operand_loc (loc, type, tmp, arg00);
7160 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7162 case LE_EXPR:
7163 if (TREE_OVERFLOW (hi))
7165 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7166 return omit_one_operand_loc (loc, type, tmp, arg00);
7168 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7170 case GT_EXPR:
7171 if (TREE_OVERFLOW (hi))
7173 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7174 return omit_one_operand_loc (loc, type, tmp, arg00);
7176 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7178 case GE_EXPR:
7179 if (TREE_OVERFLOW (lo))
7181 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7182 return omit_one_operand_loc (loc, type, tmp, arg00);
7184 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7186 default:
7187 break;
7190 return NULL_TREE;
7194 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7195 equality/inequality test, then return a simplified form of the test
7196 using a sign test. Otherwise return NULL. TYPE is the desired
7197 result type. */
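/* Example, assuming 32-bit int: (x & 0x80000000) != 0 tests the sign
   bit and becomes (int) x < 0, while (x & 0x80000000) == 0 becomes
   (int) x >= 0.  */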
7199 static tree
7200 fold_single_bit_test_into_sign_test (location_t loc,
7201 enum tree_code code, tree arg0, tree arg1,
7202 tree result_type)
7204 /* If this is testing a single bit, we can optimize the test. */
7205 if ((code == NE_EXPR || code == EQ_EXPR)
7206 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7207 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7209 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7210 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7211 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7213 if (arg00 != NULL_TREE
7214 /* This is only a win if casting to a signed type is cheap,
7215 i.e. when arg00's type is not a partial mode. */
7216 && TYPE_PRECISION (TREE_TYPE (arg00))
7217 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7219 tree stype = signed_type_for (TREE_TYPE (arg00));
7220 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7221 result_type,
7222 fold_convert_loc (loc, stype, arg00),
7223 build_int_cst (stype, 0));
7227 return NULL_TREE;
7230 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7231 equality/inequality test, then return a simplified form of
7232 the test using shifts and logical operations. Otherwise return
7233 NULL. TYPE is the desired result type. */
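/* Example of the shift form built below: (x & 8) != 0 becomes
   ((x >> 3) & 1), and (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1); the
   AND is emitted last so later folds can combine with it.  */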
7235 tree
7236 fold_single_bit_test (location_t loc, enum tree_code code,
7237 tree arg0, tree arg1, tree result_type)
7239 /* If this is testing a single bit, we can optimize the test. */
7240 if ((code == NE_EXPR || code == EQ_EXPR)
7241 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7242 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7244 tree inner = TREE_OPERAND (arg0, 0);
7245 tree type = TREE_TYPE (arg0);
7246 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7247 enum machine_mode operand_mode = TYPE_MODE (type);
7248 int ops_unsigned;
7249 tree signed_type, unsigned_type, intermediate_type;
7250 tree tem, one;
7252 /* First, see if we can fold the single bit test into a sign-bit
7253 test. */
7254 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7255 result_type);
7256 if (tem)
7257 return tem;
7259 /* Otherwise we have (A & C) != 0 where C is a single bit,
7260 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7261 Similarly for (A & C) == 0. */
7263 /* If INNER is a right shift of a constant and it plus BITNUM does
7264 not overflow, adjust BITNUM and INNER. */
7265 if (TREE_CODE (inner) == RSHIFT_EXPR
7266 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7267 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7268 && bitnum < TYPE_PRECISION (type)
7269 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7270 bitnum - TYPE_PRECISION (type)))
7272 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7273 inner = TREE_OPERAND (inner, 0);
7276 /* If we are going to be able to omit the AND below, we must do our
7277 operations as unsigned. If we must use the AND, we have a choice.
7278 Normally unsigned is faster, but for some machines signed is. */
7279 #ifdef LOAD_EXTEND_OP
7280 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7281 && !flag_syntax_only) ? 0 : 1;
7282 #else
7283 ops_unsigned = 1;
7284 #endif
7286 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7287 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7288 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7289 inner = fold_convert_loc (loc, intermediate_type, inner);
7291 if (bitnum != 0)
7292 inner = build2 (RSHIFT_EXPR, intermediate_type,
7293 inner, size_int (bitnum));
7295 one = build_int_cst (intermediate_type, 1);
7297 if (code == EQ_EXPR)
7298 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7300 /* Put the AND last so it can combine with more things. */
7301 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7303 /* Make sure to return the proper type. */
7304 inner = fold_convert_loc (loc, result_type, inner);
7306 return inner;
7308 return NULL_TREE;
7311 /* Check whether we are allowed to reorder operands arg0 and arg1,
7312 such that the evaluation of arg1 occurs before arg0. */
7314 static bool
7315 reorder_operands_p (const_tree arg0, const_tree arg1)
7317 if (! flag_evaluation_order)
7318 return true;
7319 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7320 return true;
7321 return ! TREE_SIDE_EFFECTS (arg0)
7322 && ! TREE_SIDE_EFFECTS (arg1);
7325 /* Test whether it is preferable to swap two operands, ARG0 and
7326 ARG1, for example because ARG0 is an integer constant and ARG1
7327 isn't. If REORDER is true, only recommend swapping if we can
7328 evaluate the operands in reverse order. */
7330 bool
7331 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7333 STRIP_SIGN_NOPS (arg0);
7334 STRIP_SIGN_NOPS (arg1);
7336 if (TREE_CODE (arg1) == INTEGER_CST)
7337 return 0;
7338 if (TREE_CODE (arg0) == INTEGER_CST)
7339 return 1;
7341 if (TREE_CODE (arg1) == REAL_CST)
7342 return 0;
7343 if (TREE_CODE (arg0) == REAL_CST)
7344 return 1;
7346 if (TREE_CODE (arg1) == FIXED_CST)
7347 return 0;
7348 if (TREE_CODE (arg0) == FIXED_CST)
7349 return 1;
7351 if (TREE_CODE (arg1) == COMPLEX_CST)
7352 return 0;
7353 if (TREE_CODE (arg0) == COMPLEX_CST)
7354 return 1;
7356 if (TREE_CONSTANT (arg1))
7357 return 0;
7358 if (TREE_CONSTANT (arg0))
7359 return 1;
7361 if (optimize_function_for_size_p (cfun))
7362 return 0;
7364 if (reorder && flag_evaluation_order
7365 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7366 return 0;
7368 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7369 for commutative and comparison operators. Ensuring a canonical
7370 form allows the optimizers to find additional redundancies without
7371 having to explicitly check for both orderings. */
7372 if (TREE_CODE (arg0) == SSA_NAME
7373 && TREE_CODE (arg1) == SSA_NAME
7374 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7375 return 1;
7377 /* Put SSA_NAMEs last. */
7378 if (TREE_CODE (arg1) == SSA_NAME)
7379 return 0;
7380 if (TREE_CODE (arg0) == SSA_NAME)
7381 return 1;
7383 /* Put variables last. */
7384 if (DECL_P (arg1))
7385 return 0;
7386 if (DECL_P (arg0))
7387 return 1;
7389 return 0;
7392 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7393 ARG0 is extended to a wider type. */
7395 static tree
7396 fold_widened_comparison (location_t loc, enum tree_code code,
7397 tree type, tree arg0, tree arg1)
7399 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7400 tree arg1_unw;
7401 tree shorter_type, outer_type;
7402 tree min, max;
7403 bool above, below;
7405 if (arg0_unw == arg0)
7406 return NULL_TREE;
7407 shorter_type = TREE_TYPE (arg0_unw);
7409 #ifdef HAVE_canonicalize_funcptr_for_compare
7410 /* Disable this optimization if we're casting a function pointer
7411 type on targets that require function pointer canonicalization. */
7412 if (HAVE_canonicalize_funcptr_for_compare
7413 && TREE_CODE (shorter_type) == POINTER_TYPE
7414 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7415 return NULL_TREE;
7416 #endif
7418 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7419 return NULL_TREE;
7421 arg1_unw = get_unwidened (arg1, NULL_TREE);
7423 /* If possible, express the comparison in the shorter mode. */
7424 if ((code == EQ_EXPR || code == NE_EXPR
7425 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7426 && (TREE_TYPE (arg1_unw) == shorter_type
7427 || ((TYPE_PRECISION (shorter_type)
7428 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7429 && (TYPE_UNSIGNED (shorter_type)
7430 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7431 || (TREE_CODE (arg1_unw) == INTEGER_CST
7432 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7433 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7434 && int_fits_type_p (arg1_unw, shorter_type))))
7435 return fold_build2_loc (loc, code, type, arg0_unw,
7436 fold_convert_loc (loc, shorter_type, arg1_unw));
7438 if (TREE_CODE (arg1_unw) != INTEGER_CST
7439 || TREE_CODE (shorter_type) != INTEGER_TYPE
7440 || !int_fits_type_p (arg1_unw, shorter_type))
7441 return NULL_TREE;
7443 /* If we are comparing with an integer that does not fit into the range
7444 of the shorter type, the result is known. */
7445 outer_type = TREE_TYPE (arg1_unw);
7446 min = lower_bound_in_type (outer_type, shorter_type);
7447 max = upper_bound_in_type (outer_type, shorter_type);
7449 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7450 max, arg1_unw));
7451 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7452 arg1_unw, min));
7454 switch (code)
7456 case EQ_EXPR:
7457 if (above || below)
7458 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7459 break;
7461 case NE_EXPR:
7462 if (above || below)
7463 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7464 break;
7466 case LT_EXPR:
7467 case LE_EXPR:
7468 if (above)
7469 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7470 else if (below)
7471 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
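/* Fall through: when neither ABOVE nor BELOW holds, the tests in the
GT_EXPR/GE_EXPR case fail as well and we reach the default case. */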
7473 case GT_EXPR:
7474 case GE_EXPR:
7475 if (above)
7476 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7477 else if (below)
7478 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
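/* Fall through to the default case. */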
7480 default:
7481 break;
7484 return NULL_TREE;
7487 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where ARG0
7488 is a conversion that changes only its signedness. */
7490 static tree
7491 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7492 tree arg0, tree arg1)
7494 tree arg0_inner;
7495 tree inner_type, outer_type;
7497 if (!CONVERT_EXPR_P (arg0))
7498 return NULL_TREE;
7500 outer_type = TREE_TYPE (arg0);
7501 arg0_inner = TREE_OPERAND (arg0, 0);
7502 inner_type = TREE_TYPE (arg0_inner);
7504 #ifdef HAVE_canonicalize_funcptr_for_compare
7505 /* Disable this optimization if we're casting a function pointer
7506 type on targets that require function pointer canonicalization. */
7507 if (HAVE_canonicalize_funcptr_for_compare
7508 && TREE_CODE (inner_type) == POINTER_TYPE
7509 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7510 return NULL_TREE;
7511 #endif
7513 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7514 return NULL_TREE;
7516 if (TREE_CODE (arg1) != INTEGER_CST
7517 && !(CONVERT_EXPR_P (arg1)
7518 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7519 return NULL_TREE;
7521 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7522 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7523 && code != NE_EXPR
7524 && code != EQ_EXPR)
7525 return NULL_TREE;
7527 if (TREE_CODE (arg1) == INTEGER_CST)
7528 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7529 TREE_INT_CST_HIGH (arg1), 0,
7530 TREE_OVERFLOW (arg1));
7531 else
7532 arg1 = fold_convert_loc (loc, inner_type, arg1);
7534 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7537 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7538 the step of the array. Reconstructs s and delta in the case of s *
7539 delta being an integer constant (and thus already folded). ADDR is
7540 the address. OP1 is the multiplicative expression. If the
7541 function succeeds, the new address expression is returned.
7542 Otherwise NULL_TREE is returned. LOC is the location of the
7543 resulting expression. */
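/* As a sketch of the intent, for int a[] with 4-byte elements:
&a[i] p+ 4 * d becomes &a[i + d], and &a[i] p+ 8 becomes &a[i + 2]. */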
7545 static tree
7546 try_move_mult_to_index (location_t loc, tree addr, tree op1)
7548 tree s, delta, step;
7549 tree ref = TREE_OPERAND (addr, 0), pref;
7550 tree ret, pos;
7551 tree itype;
7552 bool mdim = false;
7554 /* Strip the nops that might be added when converting op1 to sizetype. */
7555 STRIP_NOPS (op1);
7557 /* Canonicalize op1 into a possibly non-constant delta
7558 and an INTEGER_CST s. */
7559 if (TREE_CODE (op1) == MULT_EXPR)
7561 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7563 STRIP_NOPS (arg0);
7564 STRIP_NOPS (arg1);
7566 if (TREE_CODE (arg0) == INTEGER_CST)
7568 s = arg0;
7569 delta = arg1;
7571 else if (TREE_CODE (arg1) == INTEGER_CST)
7573 s = arg1;
7574 delta = arg0;
7576 else
7577 return NULL_TREE;
7579 else if (TREE_CODE (op1) == INTEGER_CST)
7581 delta = op1;
7582 s = NULL_TREE;
7584 else
7586 /* Treat the expression as delta * 1. */
7587 delta = op1;
7588 s = integer_one_node;
7591 for (;; ref = TREE_OPERAND (ref, 0))
7593 if (TREE_CODE (ref) == ARRAY_REF)
7595 tree domain;
7597 /* Remember if this was a multi-dimensional array. */
7598 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7599 mdim = true;
7601 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7602 if (! domain)
7603 continue;
7604 itype = TREE_TYPE (domain);
7606 step = array_ref_element_size (ref);
7607 if (TREE_CODE (step) != INTEGER_CST)
7608 continue;
7610 if (s)
7612 if (! tree_int_cst_equal (step, s))
7613 continue;
7615 else
7617 /* Check whether delta is a multiple of step. */
7618 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7619 if (! tmp)
7620 continue;
7621 delta = tmp;
7624 /* Only fold here if we can verify we do not overflow one
7625 dimension of a multi-dimensional array. */
7626 if (mdim)
7628 tree tmp;
7630 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7631 || !TYPE_MAX_VALUE (domain)
7632 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7633 continue;
7635 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7636 fold_convert_loc (loc, itype,
7637 TREE_OPERAND (ref, 1)),
7638 fold_convert_loc (loc, itype, delta));
7639 if (!tmp
7640 || TREE_CODE (tmp) != INTEGER_CST
7641 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7642 continue;
7645 break;
7647 else
7648 mdim = false;
7650 if (!handled_component_p (ref))
7651 return NULL_TREE;
7654 /* We found a suitable array reference. Copy everything up to it,
7655 and replace the index. */
7657 pref = TREE_OPERAND (addr, 0);
7658 ret = copy_node (pref);
7659 SET_EXPR_LOCATION (ret, loc);
7660 pos = ret;
7662 while (pref != ref)
7664 pref = TREE_OPERAND (pref, 0);
7665 TREE_OPERAND (pos, 0) = copy_node (pref);
7666 pos = TREE_OPERAND (pos, 0);
7669 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7670 fold_convert_loc (loc, itype,
7671 TREE_OPERAND (pos, 1)),
7672 fold_convert_loc (loc, itype, delta));
7674 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7678 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7679 means A >= Y && A != MAX, but in this case we know that
7680 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7682 static tree
7683 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7685 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7687 if (TREE_CODE (bound) == LT_EXPR)
7688 a = TREE_OPERAND (bound, 0);
7689 else if (TREE_CODE (bound) == GT_EXPR)
7690 a = TREE_OPERAND (bound, 1);
7691 else
7692 return NULL_TREE;
7694 typea = TREE_TYPE (a);
7695 if (!INTEGRAL_TYPE_P (typea)
7696 && !POINTER_TYPE_P (typea))
7697 return NULL_TREE;
7699 if (TREE_CODE (ineq) == LT_EXPR)
7701 a1 = TREE_OPERAND (ineq, 1);
7702 y = TREE_OPERAND (ineq, 0);
7704 else if (TREE_CODE (ineq) == GT_EXPR)
7706 a1 = TREE_OPERAND (ineq, 0);
7707 y = TREE_OPERAND (ineq, 1);
7709 else
7710 return NULL_TREE;
7712 if (TREE_TYPE (a1) != typea)
7713 return NULL_TREE;
7715 if (POINTER_TYPE_P (typea))
7717 /* Convert the pointers to signed integers before taking the difference. */
7718 tree ta = fold_convert_loc (loc, ssizetype, a);
7719 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7720 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7722 else
7723 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7725 if (!diff || !integer_onep (diff))
7726 return NULL_TREE;
7728 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7731 /* Fold a sum or difference in which at least one operand is a multiplication.
7732 Returns the folded tree or NULL if no simplification could be made. */
7734 static tree
7735 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7736 tree arg0, tree arg1)
7738 tree arg00, arg01, arg10, arg11;
7739 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7741 /* (A * C) +- (B * C) -> (A+-B) * C.
7742 (A * C) +- A -> A * (C+-1).
7743 We are most concerned about the case where C is a constant,
7744 but other combinations show up during loop reduction. Since
7745 it is not difficult, try all four possibilities. */
7747 if (TREE_CODE (arg0) == MULT_EXPR)
7749 arg00 = TREE_OPERAND (arg0, 0);
7750 arg01 = TREE_OPERAND (arg0, 1);
7752 else if (TREE_CODE (arg0) == INTEGER_CST)
7754 arg00 = build_one_cst (type);
7755 arg01 = arg0;
7757 else
7759 /* We cannot generate constant 1 for fract. */
7760 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7761 return NULL_TREE;
7762 arg00 = arg0;
7763 arg01 = build_one_cst (type);
7765 if (TREE_CODE (arg1) == MULT_EXPR)
7767 arg10 = TREE_OPERAND (arg1, 0);
7768 arg11 = TREE_OPERAND (arg1, 1);
7770 else if (TREE_CODE (arg1) == INTEGER_CST)
7772 arg10 = build_one_cst (type);
7773 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7774 the purpose of this canonicalization. */
7775 if (TREE_INT_CST_HIGH (arg1) == -1
7776 && negate_expr_p (arg1)
7777 && code == PLUS_EXPR)
7779 arg11 = negate_expr (arg1);
7780 code = MINUS_EXPR;
7782 else
7783 arg11 = arg1;
7785 else
7787 /* We cannot generate constant 1 for fract. */
7788 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7789 return NULL_TREE;
7790 arg10 = arg1;
7791 arg11 = build_one_cst (type);
7793 same = NULL_TREE;
7795 if (operand_equal_p (arg01, arg11, 0))
7796 same = arg01, alt0 = arg00, alt1 = arg10;
7797 else if (operand_equal_p (arg00, arg10, 0))
7798 same = arg00, alt0 = arg01, alt1 = arg11;
7799 else if (operand_equal_p (arg00, arg11, 0))
7800 same = arg00, alt0 = arg01, alt1 = arg10;
7801 else if (operand_equal_p (arg01, arg10, 0))
7802 same = arg01, alt0 = arg00, alt1 = arg11;
7804 /* No identical multiplicands; see if we can find a common
7805 power-of-two factor in non-power-of-two multiplies. This
7806 can help in multi-dimensional array access. */
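/* For example, i * 12 + j * 4 becomes (i * 3 + j) * 4 here. */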
7807 else if (host_integerp (arg01, 0)
7808 && host_integerp (arg11, 0))
7810 HOST_WIDE_INT int01, int11, tmp;
7811 bool swap = false;
7812 tree maybe_same;
7813 int01 = TREE_INT_CST_LOW (arg01);
7814 int11 = TREE_INT_CST_LOW (arg11);
7816 /* Move min of absolute values to int11. */
7817 if ((int01 >= 0 ? int01 : -int01)
7818 < (int11 >= 0 ? int11 : -int11))
7820 tmp = int01, int01 = int11, int11 = tmp;
7821 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7822 maybe_same = arg01;
7823 swap = true;
7825 else
7826 maybe_same = arg11;
7828 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7829 /* The remainder should not be a constant, otherwise we
7830 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7831 increase the number of multiplications necessary. */
7832 && TREE_CODE (arg10) != INTEGER_CST)
7834 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7835 build_int_cst (TREE_TYPE (arg00),
7836 int01 / int11));
7837 alt1 = arg10;
7838 same = maybe_same;
7839 if (swap)
7840 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7844 if (same)
7845 return fold_build2_loc (loc, MULT_EXPR, type,
7846 fold_build2_loc (loc, code, type,
7847 fold_convert_loc (loc, type, alt0),
7848 fold_convert_loc (loc, type, alt1)),
7849 fold_convert_loc (loc, type, same));
7851 return NULL_TREE;
7854 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7855 specified by EXPR into the buffer PTR of length LEN bytes.
7856 Return the number of bytes placed in the buffer, or zero
7857 upon failure. */
7859 static int
7860 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7862 tree type = TREE_TYPE (expr);
7863 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7864 int byte, offset, word, words;
7865 unsigned char value;
7867 if (total_bytes > len)
7868 return 0;
7869 words = total_bytes / UNITS_PER_WORD;
7871 for (byte = 0; byte < total_bytes; byte++)
7873 int bitpos = byte * BITS_PER_UNIT;
7874 if (bitpos < HOST_BITS_PER_WIDE_INT)
7875 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7876 else
7877 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7878 >> (bitpos - HOST_BITS_PER_WIDE_INT));
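/* Map the logical byte index BYTE onto its position in the target
representation, honoring word and byte endianness. */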
7880 if (total_bytes > UNITS_PER_WORD)
7882 word = byte / UNITS_PER_WORD;
7883 if (WORDS_BIG_ENDIAN)
7884 word = (words - 1) - word;
7885 offset = word * UNITS_PER_WORD;
7886 if (BYTES_BIG_ENDIAN)
7887 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7888 else
7889 offset += byte % UNITS_PER_WORD;
7891 else
7892 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7893 ptr[offset] = value;
7895 return total_bytes;
7899 /* Subroutine of native_encode_expr. Encode the REAL_CST
7900 specified by EXPR into the buffer PTR of length LEN bytes.
7901 Return the number of bytes placed in the buffer, or zero
7902 upon failure. */
7904 static int
7905 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7907 tree type = TREE_TYPE (expr);
7908 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7909 int byte, offset, word, words, bitpos;
7910 unsigned char value;
7912 /* real_to_target puts exactly 32 bits in each long, no matter the
7913 size of the host's long. We handle floating point representations
7914 with up to 192 bits. */
7915 long tmp[6];
7917 if (total_bytes > len)
7918 return 0;
7919 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7921 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7923 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7924 bitpos += BITS_PER_UNIT)
7926 byte = (bitpos / BITS_PER_UNIT) & 3;
7927 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7929 if (UNITS_PER_WORD < 4)
7931 word = byte / UNITS_PER_WORD;
7932 if (WORDS_BIG_ENDIAN)
7933 word = (words - 1) - word;
7934 offset = word * UNITS_PER_WORD;
7935 if (BYTES_BIG_ENDIAN)
7936 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7937 else
7938 offset += byte % UNITS_PER_WORD;
7940 else
7941 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7942 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7944 return total_bytes;
7947 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7948 specified by EXPR into the buffer PTR of length LEN bytes.
7949 Return the number of bytes placed in the buffer, or zero
7950 upon failure. */
7952 static int
7953 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7955 int rsize, isize;
7956 tree part;
7958 part = TREE_REALPART (expr);
7959 rsize = native_encode_expr (part, ptr, len);
7960 if (rsize == 0)
7961 return 0;
7962 part = TREE_IMAGPART (expr);
7963 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7964 if (isize != rsize)
7965 return 0;
7966 return rsize + isize;
7970 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7971 specified by EXPR into the buffer PTR of length LEN bytes.
7972 Return the number of bytes placed in the buffer, or zero
7973 upon failure. */
7975 static int
7976 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7978 int i, size, offset, count;
7979 tree itype, elem, elements;
7981 offset = 0;
7982 elements = TREE_VECTOR_CST_ELTS (expr);
7983 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7984 itype = TREE_TYPE (TREE_TYPE (expr));
7985 size = GET_MODE_SIZE (TYPE_MODE (itype));
7986 for (i = 0; i < count; i++)
7988 if (elements)
7990 elem = TREE_VALUE (elements);
7991 elements = TREE_CHAIN (elements);
7993 else
7994 elem = NULL_TREE;
7996 if (elem)
7998 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7999 return 0;
8001 else
8003 if (offset + size > len)
8004 return 0;
8005 memset (ptr+offset, 0, size);
8007 offset += size;
8009 return offset;
8013 /* Subroutine of native_encode_expr. Encode the STRING_CST
8014 specified by EXPR into the buffer PTR of length LEN bytes.
8015 Return the number of bytes placed in the buffer, or zero
8016 upon failure. */
8018 static int
8019 native_encode_string (const_tree expr, unsigned char *ptr, int len)
8021 tree type = TREE_TYPE (expr);
8022 HOST_WIDE_INT total_bytes;
8024 if (TREE_CODE (type) != ARRAY_TYPE
8025 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8026 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
8027 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
8028 return 0;
8029 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
8030 if (total_bytes > len)
8031 return 0;
8032 if (TREE_STRING_LENGTH (expr) < total_bytes)
8034 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
8035 memset (ptr + TREE_STRING_LENGTH (expr), 0,
8036 total_bytes - TREE_STRING_LENGTH (expr));
8038 else
8039 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
8040 return total_bytes;
8044 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
8045 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
8046 buffer PTR of length LEN bytes. Return the number of bytes
8047 placed in the buffer, or zero upon failure. */
8049 int
8050 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
8052 switch (TREE_CODE (expr))
8054 case INTEGER_CST:
8055 return native_encode_int (expr, ptr, len);
8057 case REAL_CST:
8058 return native_encode_real (expr, ptr, len);
8060 case COMPLEX_CST:
8061 return native_encode_complex (expr, ptr, len);
8063 case VECTOR_CST:
8064 return native_encode_vector (expr, ptr, len);
8066 case STRING_CST:
8067 return native_encode_string (expr, ptr, len);
8069 default:
8070 return 0;
8075 /* Subroutine of native_interpret_expr. Interpret the contents of
8076 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8077 If the buffer cannot be interpreted, return NULL_TREE. */
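/* For example, on a little-endian target the two bytes { 0x01, 0x02 }
interpreted as a 16-bit integer yield the INTEGER_CST 0x0201. */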
8079 static tree
8080 native_interpret_int (tree type, const unsigned char *ptr, int len)
8082 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8083 int byte, offset, word, words;
8084 unsigned char value;
8085 unsigned HOST_WIDE_INT lo = 0;
8086 HOST_WIDE_INT hi = 0;
8088 if (total_bytes > len)
8089 return NULL_TREE;
8090 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
8091 return NULL_TREE;
8092 words = total_bytes / UNITS_PER_WORD;
8094 for (byte = 0; byte < total_bytes; byte++)
8096 int bitpos = byte * BITS_PER_UNIT;
8097 if (total_bytes > UNITS_PER_WORD)
8099 word = byte / UNITS_PER_WORD;
8100 if (WORDS_BIG_ENDIAN)
8101 word = (words - 1) - word;
8102 offset = word * UNITS_PER_WORD;
8103 if (BYTES_BIG_ENDIAN)
8104 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8105 else
8106 offset += byte % UNITS_PER_WORD;
8108 else
8109 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8110 value = ptr[offset];
8112 if (bitpos < HOST_BITS_PER_WIDE_INT)
8113 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
8114 else
8115 hi |= (unsigned HOST_WIDE_INT) value
8116 << (bitpos - HOST_BITS_PER_WIDE_INT);
8119 return build_int_cst_wide_type (type, lo, hi);
8123 /* Subroutine of native_interpret_expr. Interpret the contents of
8124 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8125 If the buffer cannot be interpreted, return NULL_TREE. */
8127 static tree
8128 native_interpret_real (tree type, const unsigned char *ptr, int len)
8130 enum machine_mode mode = TYPE_MODE (type);
8131 int total_bytes = GET_MODE_SIZE (mode);
8132 int byte, offset, word, words, bitpos;
8133 unsigned char value;
8134 /* real_from_target expects exactly 32 bits in each long, no matter
8135 the size of the host's long. We handle floating point representations
8136 with up to 192 bits. */
8137 REAL_VALUE_TYPE r;
8138 long tmp[6];
8140 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8141 if (total_bytes > len || total_bytes > 24)
8142 return NULL_TREE;
8143 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8145 memset (tmp, 0, sizeof (tmp));
8146 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8147 bitpos += BITS_PER_UNIT)
8149 byte = (bitpos / BITS_PER_UNIT) & 3;
8150 if (UNITS_PER_WORD < 4)
8152 word = byte / UNITS_PER_WORD;
8153 if (WORDS_BIG_ENDIAN)
8154 word = (words - 1) - word;
8155 offset = word * UNITS_PER_WORD;
8156 if (BYTES_BIG_ENDIAN)
8157 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8158 else
8159 offset += byte % UNITS_PER_WORD;
8161 else
8162 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
8163 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8165 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8168 real_from_target (&r, tmp, mode);
8169 return build_real (type, r);
8173 /* Subroutine of native_interpret_expr. Interpret the contents of
8174 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8175 If the buffer cannot be interpreted, return NULL_TREE. */
8177 static tree
8178 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8180 tree etype, rpart, ipart;
8181 int size;
8183 etype = TREE_TYPE (type);
8184 size = GET_MODE_SIZE (TYPE_MODE (etype));
8185 if (size * 2 > len)
8186 return NULL_TREE;
8187 rpart = native_interpret_expr (etype, ptr, size);
8188 if (!rpart)
8189 return NULL_TREE;
8190 ipart = native_interpret_expr (etype, ptr+size, size);
8191 if (!ipart)
8192 return NULL_TREE;
8193 return build_complex (type, rpart, ipart);
8197 /* Subroutine of native_interpret_expr. Interpret the contents of
8198 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8199 If the buffer cannot be interpreted, return NULL_TREE. */
8201 static tree
8202 native_interpret_vector (tree type, const unsigned char *ptr, int len)
8204 tree etype, elem, elements;
8205 int i, size, count;
8207 etype = TREE_TYPE (type);
8208 size = GET_MODE_SIZE (TYPE_MODE (etype));
8209 count = TYPE_VECTOR_SUBPARTS (type);
8210 if (size * count > len)
8211 return NULL_TREE;
8213 elements = NULL_TREE;
8214 for (i = count - 1; i >= 0; i--)
8216 elem = native_interpret_expr (etype, ptr+(i*size), size);
8217 if (!elem)
8218 return NULL_TREE;
8219 elements = tree_cons (NULL_TREE, elem, elements);
8221 return build_vector (type, elements);
8225 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8226 the buffer PTR of length LEN as a constant of type TYPE. For
8227 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8228 we return a REAL_CST, etc. If the buffer cannot be interpreted,
8229 return NULL_TREE. */
8231 tree
8232 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8234 switch (TREE_CODE (type))
8236 case INTEGER_TYPE:
8237 case ENUMERAL_TYPE:
8238 case BOOLEAN_TYPE:
8239 return native_interpret_int (type, ptr, len);
8241 case REAL_TYPE:
8242 return native_interpret_real (type, ptr, len);
8244 case COMPLEX_TYPE:
8245 return native_interpret_complex (type, ptr, len);
8247 case VECTOR_TYPE:
8248 return native_interpret_vector (type, ptr, len);
8250 default:
8251 return NULL_TREE;
8256 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8257 TYPE at compile-time. If we're unable to perform the conversion
8258 return NULL_TREE. */
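/* For example, on a target whose float format is IEEE single precision,
a VIEW_CONVERT_EXPR of the REAL_CST 1.0f to a 32-bit integer type folds
to the INTEGER_CST 0x3f800000. */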
8260 static tree
8261 fold_view_convert_expr (tree type, tree expr)
8263 /* We support up to 512-bit values (for V8DFmode). */
8264 unsigned char buffer[64];
8265 int len;
8267 /* Check that the host and target are sane. */
8268 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8269 return NULL_TREE;
8271 len = native_encode_expr (expr, buffer, sizeof (buffer));
8272 if (len == 0)
8273 return NULL_TREE;
8275 return native_interpret_expr (type, buffer, len);
8278 /* Build an expression for the address of T. Folds away INDIRECT_REF
8279 to avoid confusing the gimplify process. */
8281 tree
8282 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8284 /* The size of the object is not relevant when talking about its address. */
8285 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8286 t = TREE_OPERAND (t, 0);
8288 /* Note: this does not apply to ALIGN_INDIRECT_REF. */
8289 if (TREE_CODE (t) == INDIRECT_REF
8290 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
8292 t = TREE_OPERAND (t, 0);
8294 if (TREE_TYPE (t) != ptrtype)
8296 t = build1 (NOP_EXPR, ptrtype, t);
8297 SET_EXPR_LOCATION (t, loc);
8300 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8302 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8304 if (TREE_TYPE (t) != ptrtype)
8305 t = fold_convert_loc (loc, ptrtype, t);
8307 else
8309 t = build1 (ADDR_EXPR, ptrtype, t);
8310 SET_EXPR_LOCATION (t, loc);
8313 return t;
8316 /* Build an expression for the address of T. */
8318 tree
8319 build_fold_addr_expr_loc (location_t loc, tree t)
8321 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8323 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8326 /* Fold a unary expression of code CODE and type TYPE with operand
8327 OP0. Return the folded expression if folding is successful.
8328 Otherwise, return NULL_TREE. */
8330 tree
8331 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8333 tree tem;
8334 tree arg0;
8335 enum tree_code_class kind = TREE_CODE_CLASS (code);
8337 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8338 && TREE_CODE_LENGTH (code) == 1);
8340 arg0 = op0;
8341 if (arg0)
8343 if (CONVERT_EXPR_CODE_P (code)
8344 || code == FLOAT_EXPR || code == ABS_EXPR)
8346 /* Don't use STRIP_NOPS, because signedness of argument type
8347 matters. */
8348 STRIP_SIGN_NOPS (arg0);
8350 else
8352 /* Strip any conversions that don't change the mode. This
8353 is safe for every expression, except for a comparison
8354 expression because its signedness is derived from its
8355 operands.
8357 Note that this is done as an internal manipulation within
8358 the constant folder, in order to find the simplest
8359 representation of the arguments so that their form can be
8360 studied. In any case, the appropriate type conversions
8361 should be put back in the tree that will get out of the
8362 constant folder. */
8363 STRIP_NOPS (arg0);
8367 if (TREE_CODE_CLASS (code) == tcc_unary)
8369 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8370 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8371 fold_build1_loc (loc, code, type,
8372 fold_convert_loc (loc, TREE_TYPE (op0),
8373 TREE_OPERAND (arg0, 1))));
8374 else if (TREE_CODE (arg0) == COND_EXPR)
8376 tree arg01 = TREE_OPERAND (arg0, 1);
8377 tree arg02 = TREE_OPERAND (arg0, 2);
8378 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8379 arg01 = fold_build1_loc (loc, code, type,
8380 fold_convert_loc (loc,
8381 TREE_TYPE (op0), arg01));
8382 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8383 arg02 = fold_build1_loc (loc, code, type,
8384 fold_convert_loc (loc,
8385 TREE_TYPE (op0), arg02));
8386 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8387 arg01, arg02);
8389 /* If this was a conversion, and all we did was to move it
8390 inside the COND_EXPR, bring it back out. But leave it if
8391 it is a conversion from integer to integer and the
8392 result precision is no wider than a word since such a
8393 conversion is cheap and may be optimized away by combine,
8394 while it couldn't if it were outside the COND_EXPR. Then return
8395 so we don't get into an infinite recursion loop taking the
8396 conversion out and then back in. */
8398 if ((CONVERT_EXPR_CODE_P (code)
8399 || code == NON_LVALUE_EXPR)
8400 && TREE_CODE (tem) == COND_EXPR
8401 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8402 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8403 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8404 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8405 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8406 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8407 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8408 && (INTEGRAL_TYPE_P
8409 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8410 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8411 || flag_syntax_only))
8413 tem = build1 (code, type,
8414 build3 (COND_EXPR,
8415 TREE_TYPE (TREE_OPERAND
8416 (TREE_OPERAND (tem, 1), 0)),
8417 TREE_OPERAND (tem, 0),
8418 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8419 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8420 SET_EXPR_LOCATION (tem, loc);
8422 return tem;
8424 else if (COMPARISON_CLASS_P (arg0))
8426 if (TREE_CODE (type) == BOOLEAN_TYPE)
8428 arg0 = copy_node (arg0);
8429 TREE_TYPE (arg0) = type;
8430 return arg0;
8432 else if (TREE_CODE (type) != INTEGER_TYPE)
8433 return fold_build3_loc (loc, COND_EXPR, type, arg0,
8434 fold_build1_loc (loc, code, type,
8435 integer_one_node),
8436 fold_build1_loc (loc, code, type,
8437 integer_zero_node));
8441 switch (code)
8443 case PAREN_EXPR:
8444 /* Re-association barriers around constants and other re-association
8445 barriers can be removed. */
8446 if (CONSTANT_CLASS_P (op0)
8447 || TREE_CODE (op0) == PAREN_EXPR)
8448 return fold_convert_loc (loc, type, op0);
8449 return NULL_TREE;
8451 CASE_CONVERT:
8452 case FLOAT_EXPR:
8453 case FIX_TRUNC_EXPR:
8454 if (TREE_TYPE (op0) == type)
8455 return op0;
8457 /* If we have (type) (a CMP b) and type is an integral type, return
8458 new expression involving the new type. */
8459 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8460 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8461 TREE_OPERAND (op0, 1));
8463 /* Handle cases of two conversions in a row. */
8464 if (CONVERT_EXPR_P (op0))
8466 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8467 tree inter_type = TREE_TYPE (op0);
8468 int inside_int = INTEGRAL_TYPE_P (inside_type);
8469 int inside_ptr = POINTER_TYPE_P (inside_type);
8470 int inside_float = FLOAT_TYPE_P (inside_type);
8471 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8472 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8473 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8474 int inter_int = INTEGRAL_TYPE_P (inter_type);
8475 int inter_ptr = POINTER_TYPE_P (inter_type);
8476 int inter_float = FLOAT_TYPE_P (inter_type);
8477 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8478 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8479 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8480 int final_int = INTEGRAL_TYPE_P (type);
8481 int final_ptr = POINTER_TYPE_P (type);
8482 int final_float = FLOAT_TYPE_P (type);
8483 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8484 unsigned int final_prec = TYPE_PRECISION (type);
8485 int final_unsignedp = TYPE_UNSIGNED (type);
8487 /* In addition to the cases of two conversions in a row
8488 handled below, if we are converting something to its own
8489 type via an object of identical or wider precision, neither
8490 conversion is needed. */
8491 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8492 && (((inter_int || inter_ptr) && final_int)
8493 || (inter_float && final_float))
8494 && inter_prec >= final_prec)
8495 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8497 /* Likewise, if the intermediate and initial types are either both
8498 float or both integer, we don't need the middle conversion if the
8499 former is wider than the latter and doesn't change the signedness
8500 (for integers). Avoid this if the final type is a pointer since
8501 then we sometimes need the middle conversion. Likewise if the
8502 final type has a precision not equal to the size of its mode. */
8503 if (((inter_int && inside_int)
8504 || (inter_float && inside_float)
8505 || (inter_vec && inside_vec))
8506 && inter_prec >= inside_prec
8507 && (inter_float || inter_vec
8508 || inter_unsignedp == inside_unsignedp)
8509 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8510 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8511 && ! final_ptr
8512 && (! final_vec || inter_prec == inside_prec))
8513 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8515 /* If we have a sign-extension of a zero-extended value, we can
8516 replace that by a single zero-extension. */
8517 if (inside_int && inter_int && final_int
8518 && inside_prec < inter_prec && inter_prec < final_prec
8519 && inside_unsignedp && !inter_unsignedp)
8520 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8522 /* Two conversions in a row are not needed unless:
8523 - some conversion is floating-point (overstrict for now), or
8524 - some conversion is a vector (overstrict for now), or
8525 - the intermediate type is narrower than both initial and
8526 final, or
8527 - the intermediate type and innermost type differ in signedness,
8528 and the outermost type is wider than the intermediate, or
8529 - the initial type is a pointer type and the precisions of the
8530 intermediate and final types differ, or
8531 - the final type is a pointer type and the precisions of the
8532 initial and intermediate types differ. */
8533 if (! inside_float && ! inter_float && ! final_float
8534 && ! inside_vec && ! inter_vec && ! final_vec
8535 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8536 && ! (inside_int && inter_int
8537 && inter_unsignedp != inside_unsignedp
8538 && inter_prec < final_prec)
8539 && ((inter_unsignedp && inter_prec > inside_prec)
8540 == (final_unsignedp && final_prec > inter_prec))
8541 && ! (inside_ptr && inter_prec != final_prec)
8542 && ! (final_ptr && inside_prec != inter_prec)
8543 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8544 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8545 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8548 /* Handle (T *)&A.B.C for A being of type T and B and C
8549 living at offset zero. This occurs frequently in
8550 C++ upcasting and then accessing the base. */
8551 if (TREE_CODE (op0) == ADDR_EXPR
8552 && POINTER_TYPE_P (type)
8553 && handled_component_p (TREE_OPERAND (op0, 0)))
8555 HOST_WIDE_INT bitsize, bitpos;
8556 tree offset;
8557 enum machine_mode mode;
8558 int unsignedp, volatilep;
8559 tree base = TREE_OPERAND (op0, 0);
8560 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8561 &mode, &unsignedp, &volatilep, false);
8562 /* If the reference was to a (constant) zero offset, we can use
8563 the address of the base if it has the same base type
8564 as the result type and the pointer type is unqualified. */
8565 if (! offset && bitpos == 0
8566 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8567 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8568 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8569 return fold_convert_loc (loc, type,
8570 build_fold_addr_expr_loc (loc, base));
8573 if (TREE_CODE (op0) == MODIFY_EXPR
8574 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8575 /* Detect assigning a bitfield. */
8576 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8577 && DECL_BIT_FIELD
8578 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8580 /* Don't leave an assignment inside a conversion
8581 unless assigning a bitfield. */
8582 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8583 /* First do the assignment, then return converted constant. */
8584 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8585 TREE_NO_WARNING (tem) = 1;
8586 TREE_USED (tem) = 1;
8587 SET_EXPR_LOCATION (tem, loc);
8588 return tem;
8591 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8592 constant (if x has signed type, the sign bit cannot be set
8593 in c). This folds extension into the BIT_AND_EXPR.
8594 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8595 very likely don't have maximal range for their precision and this
8596 transformation effectively doesn't preserve non-maximal ranges. */
8597 if (TREE_CODE (type) == INTEGER_TYPE
8598 && TREE_CODE (op0) == BIT_AND_EXPR
8599 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8601 tree and_expr = op0;
8602 tree and0 = TREE_OPERAND (and_expr, 0);
8603 tree and1 = TREE_OPERAND (and_expr, 1);
8604 int change = 0;
8606 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8607 || (TYPE_PRECISION (type)
8608 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8609 change = 1;
8610 else if (TYPE_PRECISION (TREE_TYPE (and1))
8611 <= HOST_BITS_PER_WIDE_INT
8612 && host_integerp (and1, 1))
8614 unsigned HOST_WIDE_INT cst;
8616 cst = tree_low_cst (and1, 1);
8617 cst &= (HOST_WIDE_INT) -1
8618 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8619 change = (cst == 0);
8620 #ifdef LOAD_EXTEND_OP
8621 if (change
8622 && !flag_syntax_only
8623 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8624 == ZERO_EXTEND))
8626 tree uns = unsigned_type_for (TREE_TYPE (and0));
8627 and0 = fold_convert_loc (loc, uns, and0);
8628 and1 = fold_convert_loc (loc, uns, and1);
8630 #endif
8632 if (change)
8634 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8635 TREE_INT_CST_HIGH (and1), 0,
8636 TREE_OVERFLOW (and1));
8637 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8638 fold_convert_loc (loc, type, and0), tem);
8642 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8643 when one of the new casts will fold away. Conservatively we assume
8644 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8645 if (POINTER_TYPE_P (type)
8646 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8647 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8648 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8649 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8651 tree arg00 = TREE_OPERAND (arg0, 0);
8652 tree arg01 = TREE_OPERAND (arg0, 1);
8654 return fold_build2_loc (loc,
8655 TREE_CODE (arg0), type,
8656 fold_convert_loc (loc, type, arg00),
8657 fold_convert_loc (loc, sizetype, arg01));
8660 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8661 of the same precision, and X has an integer type not narrower than
8662 T1 or T2, i.e. the cast (T2)X isn't an extension. */
8663 if (INTEGRAL_TYPE_P (type)
8664 && TREE_CODE (op0) == BIT_NOT_EXPR
8665 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8666 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8667 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8669 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8670 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8671 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8672 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8673 fold_convert_loc (loc, type, tem));
8676 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8677 type of X and Y (integer types only). */
8678 if (INTEGRAL_TYPE_P (type)
8679 && TREE_CODE (op0) == MULT_EXPR
8680 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8681 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8683 /* Be careful not to introduce new overflows. */
8684 tree mult_type;
8685 if (TYPE_OVERFLOW_WRAPS (type))
8686 mult_type = type;
8687 else
8688 mult_type = unsigned_type_for (type);
8690 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8692 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8693 fold_convert_loc (loc, mult_type,
8694 TREE_OPERAND (op0, 0)),
8695 fold_convert_loc (loc, mult_type,
8696 TREE_OPERAND (op0, 1)));
8697 return fold_convert_loc (loc, type, tem);
8701 tem = fold_convert_const (code, type, op0);
8702 return tem ? tem : NULL_TREE;
8704 case ADDR_SPACE_CONVERT_EXPR:
8705 if (integer_zerop (arg0))
8706 return fold_convert_const (code, type, arg0);
8707 return NULL_TREE;
8709 case FIXED_CONVERT_EXPR:
8710 tem = fold_convert_const (code, type, arg0);
8711 return tem ? tem : NULL_TREE;
8713 case VIEW_CONVERT_EXPR:
8714 if (TREE_TYPE (op0) == type)
8715 return op0;
8716 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8717 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8718 type, TREE_OPERAND (op0, 0));
8720 /* For integral conversions with the same precision or pointer
8721 conversions, use a NOP_EXPR instead. */
8722 if ((INTEGRAL_TYPE_P (type)
8723 || POINTER_TYPE_P (type))
8724 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8725 || POINTER_TYPE_P (TREE_TYPE (op0)))
8726 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8727 return fold_convert_loc (loc, type, op0);
8729 /* Strip inner integral conversions that do not change the precision. */
8730 if (CONVERT_EXPR_P (op0)
8731 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8732 || POINTER_TYPE_P (TREE_TYPE (op0)))
8733 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8734 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8735 && (TYPE_PRECISION (TREE_TYPE (op0))
8736 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8737 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8738 type, TREE_OPERAND (op0, 0));
8740 return fold_view_convert_expr (type, op0);
8742 case NEGATE_EXPR:
8743 tem = fold_negate_expr (loc, arg0);
8744 if (tem)
8745 return fold_convert_loc (loc, type, tem);
8746 return NULL_TREE;
8748 case ABS_EXPR:
8749 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8750 return fold_abs_const (arg0, type);
8751 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8752 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8753 /* Convert fabs((double)float) into (double)fabsf(float). */
8754 else if (TREE_CODE (arg0) == NOP_EXPR
8755 && TREE_CODE (type) == REAL_TYPE)
8757 tree targ0 = strip_float_extensions (arg0);
8758 if (targ0 != arg0)
8759 return fold_convert_loc (loc, type,
8760 fold_build1_loc (loc, ABS_EXPR,
8761 TREE_TYPE (targ0),
8762 targ0));
8764 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8765 else if (TREE_CODE (arg0) == ABS_EXPR)
8766 return arg0;
8767 else if (tree_expr_nonnegative_p (arg0))
8768 return arg0;
8770 /* Strip sign ops from argument. */
8771 if (TREE_CODE (type) == REAL_TYPE)
8773 tem = fold_strip_sign_ops (arg0);
8774 if (tem)
8775 return fold_build1_loc (loc, ABS_EXPR, type,
8776 fold_convert_loc (loc, type, tem));
8778 return NULL_TREE;
8780 case CONJ_EXPR:
8781 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8782 return fold_convert_loc (loc, type, arg0);
8783 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8785 tree itype = TREE_TYPE (type);
8786 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8787 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8788 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8789 negate_expr (ipart));
8791 if (TREE_CODE (arg0) == COMPLEX_CST)
8793 tree itype = TREE_TYPE (type);
8794 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8795 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8796 return build_complex (type, rpart, negate_expr (ipart));
8798 if (TREE_CODE (arg0) == CONJ_EXPR)
8799 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8800 return NULL_TREE;
8802 case BIT_NOT_EXPR:
8803 if (TREE_CODE (arg0) == INTEGER_CST)
8804 return fold_not_const (arg0, type);
8805 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8806 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8807 /* Convert ~ (-A) to A - 1. */
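/* (In two's complement, ~X == -X - 1, hence ~(-A) == A - 1.) */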
8808 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8809 return fold_build2_loc (loc, MINUS_EXPR, type,
8810 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8811 build_int_cst (type, 1));
8812 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8813 else if (INTEGRAL_TYPE_P (type)
8814 && ((TREE_CODE (arg0) == MINUS_EXPR
8815 && integer_onep (TREE_OPERAND (arg0, 1)))
8816 || (TREE_CODE (arg0) == PLUS_EXPR
8817 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8818 return fold_build1_loc (loc, NEGATE_EXPR, type,
8819 fold_convert_loc (loc, type,
8820 TREE_OPERAND (arg0, 0)));
8821 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8822 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8823 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8824 fold_convert_loc (loc, type,
8825 TREE_OPERAND (arg0, 0)))))
8826 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8827 fold_convert_loc (loc, type,
8828 TREE_OPERAND (arg0, 1)));
8829 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8830 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8831 fold_convert_loc (loc, type,
8832 TREE_OPERAND (arg0, 1)))))
8833 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8834 fold_convert_loc (loc, type,
8835 TREE_OPERAND (arg0, 0)), tem);
8836 /* Perform BIT_NOT_EXPR on each element individually. */
8837 else if (TREE_CODE (arg0) == VECTOR_CST)
8839 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8840 int count = TYPE_VECTOR_SUBPARTS (type), i;
8842 for (i = 0; i < count; i++)
8844 if (elements)
8846 elem = TREE_VALUE (elements);
8847 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8848 if (elem == NULL_TREE)
8849 break;
8850 elements = TREE_CHAIN (elements);
8852 else
8853 elem = build_int_cst (TREE_TYPE (type), -1);
8854 list = tree_cons (NULL_TREE, elem, list);
8856 if (i == count)
8857 return build_vector (type, nreverse (list));
8860 return NULL_TREE;
8862 case TRUTH_NOT_EXPR:
8863 /* The argument to invert_truthvalue must have Boolean type. */
8864 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8865 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8867 /* Note that the operand of this must be an int
8868 and its values must be 0 or 1.
8869 ("true" is a fixed value perhaps depending on the language,
8870 but we don't handle values other than 1 correctly yet.) */
8871 tem = fold_truth_not_expr (loc, arg0);
8872 if (!tem)
8873 return NULL_TREE;
8874 return fold_convert_loc (loc, type, tem);
8876 case REALPART_EXPR:
8877 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8878 return fold_convert_loc (loc, type, arg0);
8879 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8880 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8881 TREE_OPERAND (arg0, 1));
8882 if (TREE_CODE (arg0) == COMPLEX_CST)
8883 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8884 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8886 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8887 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8888 fold_build1_loc (loc, REALPART_EXPR, itype,
8889 TREE_OPERAND (arg0, 0)),
8890 fold_build1_loc (loc, REALPART_EXPR, itype,
8891 TREE_OPERAND (arg0, 1)));
8892 return fold_convert_loc (loc, type, tem);
8894 if (TREE_CODE (arg0) == CONJ_EXPR)
8896 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8897 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8898 TREE_OPERAND (arg0, 0));
8899 return fold_convert_loc (loc, type, tem);
8901 if (TREE_CODE (arg0) == CALL_EXPR)
8903 tree fn = get_callee_fndecl (arg0);
8904 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8905 switch (DECL_FUNCTION_CODE (fn))
8907 CASE_FLT_FN (BUILT_IN_CEXPI):
8908 fn = mathfn_built_in (type, BUILT_IN_COS);
8909 if (fn)
8910 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8911 break;
8913 default:
8914 break;
8917 return NULL_TREE;
8919 case IMAGPART_EXPR:
8920 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8921 return fold_convert_loc (loc, type, integer_zero_node);
8922 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8923 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8924 TREE_OPERAND (arg0, 0));
8925 if (TREE_CODE (arg0) == COMPLEX_CST)
8926 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8927 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8929 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8930 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8931 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8932 TREE_OPERAND (arg0, 0)),
8933 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8934 TREE_OPERAND (arg0, 1)));
8935 return fold_convert_loc (loc, type, tem);
8937 if (TREE_CODE (arg0) == CONJ_EXPR)
8939 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8940 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8941 return fold_convert_loc (loc, type, negate_expr (tem));
8943 if (TREE_CODE (arg0) == CALL_EXPR)
8945 tree fn = get_callee_fndecl (arg0);
8946 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8947 switch (DECL_FUNCTION_CODE (fn))
8949 CASE_FLT_FN (BUILT_IN_CEXPI):
8950 fn = mathfn_built_in (type, BUILT_IN_SIN);
8951 if (fn)
8952 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8953 break;
8955 default:
8956 break;
8959 return NULL_TREE;
8961 case INDIRECT_REF:
8962 /* Fold *&X to X if X is an lvalue. */
8963 if (TREE_CODE (op0) == ADDR_EXPR)
8965 tree op00 = TREE_OPERAND (op0, 0);
8966 if ((TREE_CODE (op00) == VAR_DECL
8967 || TREE_CODE (op00) == PARM_DECL
8968 || TREE_CODE (op00) == RESULT_DECL)
8969 && !TREE_READONLY (op00))
8970 return op00;
8972 return NULL_TREE;
8974 default:
8975 return NULL_TREE;
8976 } /* switch (code) */
8980 /* If the operation was a conversion, do _not_ mark a resulting constant
8981 with TREE_OVERFLOW if the original constant was not. These conversions
8982 have implementation-defined behavior and retaining the TREE_OVERFLOW
8983 flag here would confuse later passes such as VRP. */
8984 tree
8985 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8986 tree type, tree op0)
8988 tree res = fold_unary_loc (loc, code, type, op0);
8989 if (res
8990 && TREE_CODE (res) == INTEGER_CST
8991 && TREE_CODE (op0) == INTEGER_CST
8992 && CONVERT_EXPR_CODE_P (code))
8993 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8995 return res;
8998 /* Fold a binary expression of code CODE and type TYPE with operands
8999 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
9000 Return the folded expression if folding is successful. Otherwise,
9001 return NULL_TREE. */
9003 static tree
9004 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
9006 enum tree_code compl_code;
9008 if (code == MIN_EXPR)
9009 compl_code = MAX_EXPR;
9010 else if (code == MAX_EXPR)
9011 compl_code = MIN_EXPR;
9012 else
9013 gcc_unreachable ();
9015 /* MIN (MAX (a, b), b) == b. */
9016 if (TREE_CODE (op0) == compl_code
9017 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
9018 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
9020 /* MIN (MAX (b, a), b) == b. */
9021 if (TREE_CODE (op0) == compl_code
9022 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
9023 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
9024 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
9026 /* MIN (a, MAX (a, b)) == a. */
9027 if (TREE_CODE (op1) == compl_code
9028 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
9029 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
9030 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
9032 /* MIN (a, MAX (b, a)) == a. */
9033 if (TREE_CODE (op1) == compl_code
9034 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
9035 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
9036 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
9038 return NULL_TREE;
9041 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9042 by changing CODE to reduce the magnitude of constants involved in
9043 ARG0 of the comparison.
9044 Returns a canonicalized comparison tree if a simplification was
9045 possible, otherwise returns NULL_TREE.
9046 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9047 valid if signed overflow is undefined. */
9049 static tree
9050 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9051 tree arg0, tree arg1,
9052 bool *strict_overflow_p)
9054 enum tree_code code0 = TREE_CODE (arg0);
9055 tree t, cst0 = NULL_TREE;
9056 int sgn0;
9057 bool swap = false;
9059 /* Match A +- CST code arg1 and CST code arg1. We can change the
9060 first form only if overflow is undefined. */
9061 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9062 /* In principle pointers also have undefined overflow behavior,
9063 but that causes problems elsewhere. */
9064 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9065 && (code0 == MINUS_EXPR
9066 || code0 == PLUS_EXPR)
9067 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9068 || code0 == INTEGER_CST))
9069 return NULL_TREE;
9071 /* Identify the constant in arg0 and its sign. */
9072 if (code0 == INTEGER_CST)
9073 cst0 = arg0;
9074 else
9075 cst0 = TREE_OPERAND (arg0, 1);
9076 sgn0 = tree_int_cst_sgn (cst0);
9078 /* Overflowed constants and zero will cause problems. */
9079 if (integer_zerop (cst0)
9080 || TREE_OVERFLOW (cst0))
9081 return NULL_TREE;
9083 /* See if we can reduce the magnitude of the constant in
9084 arg0 by changing the comparison code. */
9085 if (code0 == INTEGER_CST)
9087 /* CST <= arg1 -> CST-1 < arg1. */
9088 if (code == LE_EXPR && sgn0 == 1)
9089 code = LT_EXPR;
9090 /* -CST < arg1 -> -CST-1 <= arg1. */
9091 else if (code == LT_EXPR && sgn0 == -1)
9092 code = LE_EXPR;
9093 /* CST > arg1 -> CST-1 >= arg1. */
9094 else if (code == GT_EXPR && sgn0 == 1)
9095 code = GE_EXPR;
9096 /* -CST >= arg1 -> -CST-1 > arg1. */
9097 else if (code == GE_EXPR && sgn0 == -1)
9098 code = GT_EXPR;
9099 else
9100 return NULL_TREE;
9101 /* arg1 code' CST' might be more canonical. */
9102 swap = true;
9104 else
9106 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9107 if (code == LT_EXPR
9108 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9109 code = LE_EXPR;
9110 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9111 else if (code == GT_EXPR
9112 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9113 code = GE_EXPR;
9114 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9115 else if (code == LE_EXPR
9116 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9117 code = LT_EXPR;
9118 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9119 else if (code == GE_EXPR
9120 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9121 code = GT_EXPR;
9122 else
9123 return NULL_TREE;
9124 *strict_overflow_p = true;
9127 /* Now build the constant reduced in magnitude. But not if that
9128    would produce one outside of its type's range.  */
9129 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9130 && ((sgn0 == 1
9131 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9132 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9133 || (sgn0 == -1
9134 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9135 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9136 /* We cannot swap the comparison here as that would cause us to
9137 endlessly recurse. */
9138 return NULL_TREE;
9140 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9141 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
9142 if (code0 != INTEGER_CST)
9143 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9145    /* If swapping might yield a more canonical form, do so.  */
9146 if (swap)
9147 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
9148 else
9149 return fold_build2_loc (loc, code, type, t, arg1);
9152 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9153 overflow further. Try to decrease the magnitude of constants involved
9154 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9155 and put sole constants at the second argument position.
9156 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
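/* For example, "4 <= x" is first reduced to "3 < x" and then swapped
   to the more canonical "x > 3", which puts the sole constant in the
   second operand position.  */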
9158 static tree
9159 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9160 tree arg0, tree arg1)
9162 tree t;
9163 bool strict_overflow_p;
9164 const char * const warnmsg = G_("assuming signed overflow does not occur "
9165 "when reducing constant in comparison");
9167 /* Try canonicalization by simplifying arg0. */
9168 strict_overflow_p = false;
9169 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9170 &strict_overflow_p);
9171 if (t)
9173 if (strict_overflow_p)
9174 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9175 return t;
9178 /* Try canonicalization by simplifying arg1 using the swapped
9179 comparison. */
9180 code = swap_tree_comparison (code);
9181 strict_overflow_p = false;
9182 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9183 &strict_overflow_p);
9184 if (t && strict_overflow_p)
9185 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9186 return t;
9189 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9190 space. This is used to avoid issuing overflow warnings for
9191    expressions like &p->x which cannot wrap.  */
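/* For "&p->x" the base is p, OFFSET is NULL_TREE and BITPOS is the
   bit offset of the field x; BASE + OFFSET + BITPOS is then checked
   against the size of the pointed-to object, conservatively answering
   true whenever something is unknown.  */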
9193 static bool
9194 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
9196 unsigned HOST_WIDE_INT offset_low, total_low;
9197 HOST_WIDE_INT size, offset_high, total_high;
9199 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9200 return true;
9202 if (bitpos < 0)
9203 return true;
9205 if (offset == NULL_TREE)
9207 offset_low = 0;
9208 offset_high = 0;
9210 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9211 return true;
9212 else
9214 offset_low = TREE_INT_CST_LOW (offset);
9215 offset_high = TREE_INT_CST_HIGH (offset);
9218 if (add_double_with_sign (offset_low, offset_high,
9219 bitpos / BITS_PER_UNIT, 0,
9220 &total_low, &total_high,
9221 true))
9222 return true;
9224 if (total_high != 0)
9225 return true;
9227 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9228 if (size <= 0)
9229 return true;
9231 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9232 array. */
9233 if (TREE_CODE (base) == ADDR_EXPR)
9235 HOST_WIDE_INT base_size;
9237 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9238 if (base_size > 0 && size < base_size)
9239 size = base_size;
9242 return total_low > (unsigned HOST_WIDE_INT) size;
9245 /* Subroutine of fold_binary. This routine performs all of the
9246 transformations that are common to the equality/inequality
9247 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9248    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
9249    fold_binary itself should use fold_binary instead.  Fold a comparison with
9250 tree code CODE and type TYPE with operands OP0 and OP1. Return
9251 the folded comparison or NULL_TREE. */
9253 static tree
9254 fold_comparison (location_t loc, enum tree_code code, tree type,
9255 tree op0, tree op1)
9257 tree arg0, arg1, tem;
9259 arg0 = op0;
9260 arg1 = op1;
9262 STRIP_SIGN_NOPS (arg0);
9263 STRIP_SIGN_NOPS (arg1);
9265 tem = fold_relational_const (code, type, arg0, arg1);
9266 if (tem != NULL_TREE)
9267 return tem;
9269 /* If one arg is a real or integer constant, put it last. */
9270 if (tree_swap_operands_p (arg0, arg1, true))
9271 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9273 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
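/* E.g. "x + 20 < 30" becomes "x < 10"; this is only valid when
   signed overflow is undefined, hence the warning issued below.  */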
9274 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9275 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9276 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9277 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9278 && (TREE_CODE (arg1) == INTEGER_CST
9279 && !TREE_OVERFLOW (arg1)))
9281 tree const1 = TREE_OPERAND (arg0, 1);
9282 tree const2 = arg1;
9283 tree variable = TREE_OPERAND (arg0, 0);
9284 tree lhs;
9285 int lhs_add;
9286 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9288 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9289 TREE_TYPE (arg1), const2, const1);
9291    /* If the constant operation overflowed, this can be
9292 simplified as a comparison against INT_MAX/INT_MIN. */
9293 if (TREE_CODE (lhs) == INTEGER_CST
9294 && TREE_OVERFLOW (lhs))
9296 int const1_sgn = tree_int_cst_sgn (const1);
9297 enum tree_code code2 = code;
9299 /* Get the sign of the constant on the lhs if the
9300 operation were VARIABLE + CONST1. */
9301 if (TREE_CODE (arg0) == MINUS_EXPR)
9302 const1_sgn = -const1_sgn;
9304 /* The sign of the constant determines if we overflowed
9305 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9306 Canonicalize to the INT_MIN overflow by swapping the comparison
9307 if necessary. */
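/* E.g. "x + 1 < INT_MIN" overflows when rewritten as
   "x < INT_MIN - 1"; there const1_sgn == 1, code2 stays LT_EXPR
   and the comparison folds to constant false.  */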
9308 if (const1_sgn == -1)
9309 code2 = swap_tree_comparison (code);
9311 /* We now can look at the canonicalized case
9312 VARIABLE + 1 CODE2 INT_MIN
9313 and decide on the result. */
9314 if (code2 == LT_EXPR
9315 || code2 == LE_EXPR
9316 || code2 == EQ_EXPR)
9317 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9318 else if (code2 == NE_EXPR
9319 || code2 == GE_EXPR
9320 || code2 == GT_EXPR)
9321 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9324 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9325 && (TREE_CODE (lhs) != INTEGER_CST
9326 || !TREE_OVERFLOW (lhs)))
9328 fold_overflow_warning (("assuming signed overflow does not occur "
9329 "when changing X +- C1 cmp C2 to "
9330 "X cmp C1 +- C2"),
9331 WARN_STRICT_OVERFLOW_COMPARISON);
9332 return fold_build2_loc (loc, code, type, variable, lhs);
9336    /* For comparisons of pointers we can decompose them into a compile-time
9337 comparison of the base objects and the offsets into the object.
9338 This requires at least one operand being an ADDR_EXPR or a
9339 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
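/* For instance, "&s.a != &s.b" for distinct fields a and b of the
   same structure decomposes to equal bases with different bit
   positions and folds to constant true.  */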
9340 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9341 && (TREE_CODE (arg0) == ADDR_EXPR
9342 || TREE_CODE (arg1) == ADDR_EXPR
9343 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9344 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9346 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9347 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9348 enum machine_mode mode;
9349 int volatilep, unsignedp;
9350 bool indirect_base0 = false, indirect_base1 = false;
9352 /* Get base and offset for the access. Strip ADDR_EXPR for
9353 get_inner_reference, but put it back by stripping INDIRECT_REF
9354 off the base object if possible. indirect_baseN will be true
9355 if baseN is not an address but refers to the object itself. */
9356 base0 = arg0;
9357 if (TREE_CODE (arg0) == ADDR_EXPR)
9359 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9360 &bitsize, &bitpos0, &offset0, &mode,
9361 &unsignedp, &volatilep, false);
9362 if (TREE_CODE (base0) == INDIRECT_REF)
9363 base0 = TREE_OPERAND (base0, 0);
9364 else
9365 indirect_base0 = true;
9367 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9369 base0 = TREE_OPERAND (arg0, 0);
9370 offset0 = TREE_OPERAND (arg0, 1);
9373 base1 = arg1;
9374 if (TREE_CODE (arg1) == ADDR_EXPR)
9376 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9377 &bitsize, &bitpos1, &offset1, &mode,
9378 &unsignedp, &volatilep, false);
9379 if (TREE_CODE (base1) == INDIRECT_REF)
9380 base1 = TREE_OPERAND (base1, 0);
9381 else
9382 indirect_base1 = true;
9384 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9386 base1 = TREE_OPERAND (arg1, 0);
9387 offset1 = TREE_OPERAND (arg1, 1);
9390 /* If we have equivalent bases we might be able to simplify. */
9391 if (indirect_base0 == indirect_base1
9392 && operand_equal_p (base0, base1, 0))
9394 /* We can fold this expression to a constant if the non-constant
9395 offset parts are equal. */
9396 if ((offset0 == offset1
9397 || (offset0 && offset1
9398 && operand_equal_p (offset0, offset1, 0)))
9399 && (code == EQ_EXPR
9400 || code == NE_EXPR
9401 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9404 if (code != EQ_EXPR
9405 && code != NE_EXPR
9406 && bitpos0 != bitpos1
9407 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9408 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9409 fold_overflow_warning (("assuming pointer wraparound does not "
9410 "occur when comparing P +- C1 with "
9411 "P +- C2"),
9412 WARN_STRICT_OVERFLOW_CONDITIONAL);
9414 switch (code)
9416 case EQ_EXPR:
9417 return constant_boolean_node (bitpos0 == bitpos1, type);
9418 case NE_EXPR:
9419 return constant_boolean_node (bitpos0 != bitpos1, type);
9420 case LT_EXPR:
9421 return constant_boolean_node (bitpos0 < bitpos1, type);
9422 case LE_EXPR:
9423 return constant_boolean_node (bitpos0 <= bitpos1, type);
9424 case GE_EXPR:
9425 return constant_boolean_node (bitpos0 >= bitpos1, type);
9426 case GT_EXPR:
9427 return constant_boolean_node (bitpos0 > bitpos1, type);
9428 default:;
9431 /* We can simplify the comparison to a comparison of the variable
9432 offset parts if the constant offset parts are equal.
9433 Be careful to use signed size type here because otherwise we
9434 mess with array offsets in the wrong way. This is possible
9435    because pointer arithmetic is restricted to remain within an
9436 object and overflow on pointer differences is undefined as of
9437 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9438 else if (bitpos0 == bitpos1
9439 && ((code == EQ_EXPR || code == NE_EXPR)
9440 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9442 tree signed_size_type_node;
9443 signed_size_type_node = signed_type_for (size_type_node);
9445    /* By converting to the signed size type we cover middle-end pointer
9446    arithmetic, which operates on unsigned pointer types of size-type
9447    width, and ARRAY_REF offsets, which are properly sign- or
9448    zero-extended from their type in case it is narrower than the
9449    size type.  */
9450 if (offset0 == NULL_TREE)
9451 offset0 = build_int_cst (signed_size_type_node, 0);
9452 else
9453 offset0 = fold_convert_loc (loc, signed_size_type_node,
9454 offset0);
9455 if (offset1 == NULL_TREE)
9456 offset1 = build_int_cst (signed_size_type_node, 0);
9457 else
9458 offset1 = fold_convert_loc (loc, signed_size_type_node,
9459 offset1);
9461 if (code != EQ_EXPR
9462 && code != NE_EXPR
9463 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9464 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9465 fold_overflow_warning (("assuming pointer wraparound does not "
9466 "occur when comparing P +- C1 with "
9467 "P +- C2"),
9468 WARN_STRICT_OVERFLOW_COMPARISON);
9470 return fold_build2_loc (loc, code, type, offset0, offset1);
9473 /* For non-equal bases we can simplify if they are addresses
9474 of local binding decls or constants. */
9475 else if (indirect_base0 && indirect_base1
9476 /* We know that !operand_equal_p (base0, base1, 0)
9477 because the if condition was false. But make
9478 sure two decls are not the same. */
9479 && base0 != base1
9480 && TREE_CODE (arg0) == ADDR_EXPR
9481 && TREE_CODE (arg1) == ADDR_EXPR
9482 && (((TREE_CODE (base0) == VAR_DECL
9483 || TREE_CODE (base0) == PARM_DECL)
9484 && (targetm.binds_local_p (base0)
9485 || CONSTANT_CLASS_P (base1)))
9486 || CONSTANT_CLASS_P (base0))
9487 && (((TREE_CODE (base1) == VAR_DECL
9488 || TREE_CODE (base1) == PARM_DECL)
9489 && (targetm.binds_local_p (base1)
9490 || CONSTANT_CLASS_P (base0)))
9491 || CONSTANT_CLASS_P (base1)))
9493 if (code == EQ_EXPR)
9494 return omit_two_operands_loc (loc, type, boolean_false_node,
9495 arg0, arg1);
9496 else if (code == NE_EXPR)
9497 return omit_two_operands_loc (loc, type, boolean_true_node,
9498 arg0, arg1);
9500 /* For equal offsets we can simplify to a comparison of the
9501 base addresses. */
9502 else if (bitpos0 == bitpos1
9503 && (indirect_base0
9504 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9505 && (indirect_base1
9506 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9507 && ((offset0 == offset1)
9508 || (offset0 && offset1
9509 && operand_equal_p (offset0, offset1, 0))))
9511 if (indirect_base0)
9512 base0 = build_fold_addr_expr_loc (loc, base0);
9513 if (indirect_base1)
9514 base1 = build_fold_addr_expr_loc (loc, base1);
9515 return fold_build2_loc (loc, code, type, base0, base1);
9519 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9520 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9521 the resulting offset is smaller in absolute value than the
9522 original one. */
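/* E.g. "x + 2 < y + 5" becomes "x < y + 3"; the combined constant 3
   is smaller in absolute value than the original 5 and the addition
   did not overflow.  */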
9523 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9524 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9525 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9526 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9527 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9528 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9529 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9531 tree const1 = TREE_OPERAND (arg0, 1);
9532 tree const2 = TREE_OPERAND (arg1, 1);
9533 tree variable1 = TREE_OPERAND (arg0, 0);
9534 tree variable2 = TREE_OPERAND (arg1, 0);
9535 tree cst;
9536 const char * const warnmsg = G_("assuming signed overflow does not "
9537 "occur when combining constants around "
9538 "a comparison");
9540 /* Put the constant on the side where it doesn't overflow and is
9541 of lower absolute value than before. */
9542 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9543 ? MINUS_EXPR : PLUS_EXPR,
9544 const2, const1, 0);
9545 if (!TREE_OVERFLOW (cst)
9546 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9548 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9549 return fold_build2_loc (loc, code, type,
9550 variable1,
9551 fold_build2_loc (loc,
9552 TREE_CODE (arg1), TREE_TYPE (arg1),
9553 variable2, cst));
9556 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9557 ? MINUS_EXPR : PLUS_EXPR,
9558 const1, const2, 0);
9559 if (!TREE_OVERFLOW (cst)
9560 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9562 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9563 return fold_build2_loc (loc, code, type,
9564 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9565 variable1, cst),
9566 variable2);
9570 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9571 signed arithmetic case. That form is created by the compiler
9572 often enough for folding it to be of value. One example is in
9573 computing loop trip counts after Operator Strength Reduction. */
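/* E.g. "x * 4 > 0" becomes "x > 0" and "x * -4 > 0" becomes
   "x < 0", the negative multiplier flipping the comparison; both
   rely on signed overflow being undefined.  */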
9574 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9575 && TREE_CODE (arg0) == MULT_EXPR
9576 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9577 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9578 && integer_zerop (arg1))
9580 tree const1 = TREE_OPERAND (arg0, 1);
9581 tree const2 = arg1; /* zero */
9582 tree variable1 = TREE_OPERAND (arg0, 0);
9583 enum tree_code cmp_code = code;
9585 /* Handle unfolded multiplication by zero. */
9586 if (integer_zerop (const1))
9587 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9589 fold_overflow_warning (("assuming signed overflow does not occur when "
9590 "eliminating multiplication in comparison "
9591 "with zero"),
9592 WARN_STRICT_OVERFLOW_COMPARISON);
9594 /* If const1 is negative we swap the sense of the comparison. */
9595 if (tree_int_cst_sgn (const1) < 0)
9596 cmp_code = swap_tree_comparison (cmp_code);
9598 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9601 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
9602 if (tem)
9603 return tem;
9605 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9607 tree targ0 = strip_float_extensions (arg0);
9608 tree targ1 = strip_float_extensions (arg1);
9609 tree newtype = TREE_TYPE (targ0);
9611 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9612 newtype = TREE_TYPE (targ1);
9614 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9615 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9616 return fold_build2_loc (loc, code, type,
9617 fold_convert_loc (loc, newtype, targ0),
9618 fold_convert_loc (loc, newtype, targ1));
9620 /* (-a) CMP (-b) -> b CMP a */
9621 if (TREE_CODE (arg0) == NEGATE_EXPR
9622 && TREE_CODE (arg1) == NEGATE_EXPR)
9623 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9624 TREE_OPERAND (arg0, 0));
9626 if (TREE_CODE (arg1) == REAL_CST)
9628 REAL_VALUE_TYPE cst;
9629 cst = TREE_REAL_CST (arg1);
9631 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9632 if (TREE_CODE (arg0) == NEGATE_EXPR)
9633 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9634 TREE_OPERAND (arg0, 0),
9635 build_real (TREE_TYPE (arg1),
9636 REAL_VALUE_NEGATE (cst)));
9638 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9639 /* a CMP (-0) -> a CMP 0 */
9640 if (REAL_VALUE_MINUS_ZERO (cst))
9641 return fold_build2_loc (loc, code, type, arg0,
9642 build_real (TREE_TYPE (arg1), dconst0));
9644 /* x != NaN is always true, other ops are always false. */
9645 if (REAL_VALUE_ISNAN (cst)
9646 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9648 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9649 return omit_one_operand_loc (loc, type, tem, arg0);
9652 /* Fold comparisons against infinity. */
9653 if (REAL_VALUE_ISINF (cst)
9654 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9656 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9657 if (tem != NULL_TREE)
9658 return tem;
9662 /* If this is a comparison of a real constant with a PLUS_EXPR
9663 or a MINUS_EXPR of a real constant, we can convert it into a
9664 comparison with a revised real constant as long as no overflow
9665 occurs when unsafe_math_optimizations are enabled. */
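/* E.g. with -funsafe-math-optimizations, "x + 1.5 < 4.0" becomes
   "x < 2.5", the revised constant 4.0 - 1.5 being exact.  */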
9666 if (flag_unsafe_math_optimizations
9667 && TREE_CODE (arg1) == REAL_CST
9668 && (TREE_CODE (arg0) == PLUS_EXPR
9669 || TREE_CODE (arg0) == MINUS_EXPR)
9670 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9671 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9672 ? MINUS_EXPR : PLUS_EXPR,
9673 arg1, TREE_OPERAND (arg0, 1), 0))
9674 && !TREE_OVERFLOW (tem))
9675 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9677 /* Likewise, we can simplify a comparison of a real constant with
9678 a MINUS_EXPR whose first operand is also a real constant, i.e.
9679 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9680 floating-point types only if -fassociative-math is set. */
9681 if (flag_associative_math
9682 && TREE_CODE (arg1) == REAL_CST
9683 && TREE_CODE (arg0) == MINUS_EXPR
9684 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9685 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9686 arg1, 0))
9687 && !TREE_OVERFLOW (tem))
9688 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9689 TREE_OPERAND (arg0, 1), tem);
9691 /* Fold comparisons against built-in math functions. */
9692 if (TREE_CODE (arg1) == REAL_CST
9693 && flag_unsafe_math_optimizations
9694 && ! flag_errno_math)
9696 enum built_in_function fcode = builtin_mathfn_code (arg0);
9698 if (fcode != END_BUILTINS)
9700 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9701 if (tem != NULL_TREE)
9702 return tem;
9707 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9708 && CONVERT_EXPR_P (arg0))
9710 /* If we are widening one operand of an integer comparison,
9711 see if the other operand is similarly being widened. Perhaps we
9712 can do the comparison in the narrower type. */
9713 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9714 if (tem)
9715 return tem;
9717 /* Or if we are changing signedness. */
9718 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9719 if (tem)
9720 return tem;
9723 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9724 constant, we can simplify it. */
9725 if (TREE_CODE (arg1) == INTEGER_CST
9726 && (TREE_CODE (arg0) == MIN_EXPR
9727 || TREE_CODE (arg0) == MAX_EXPR)
9728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9730 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9731 if (tem)
9732 return tem;
9735 /* Simplify comparison of something with itself. (For IEEE
9736 floating-point, we can only do some of these simplifications.) */
9737 if (operand_equal_p (arg0, arg1, 0))
9739 switch (code)
9741 case EQ_EXPR:
9742 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9743 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9744 return constant_boolean_node (1, type);
9745 break;
9747 case GE_EXPR:
9748 case LE_EXPR:
9749 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9750 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9751 return constant_boolean_node (1, type);
9752 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9754 case NE_EXPR:
9755 /* For NE, we can only do this simplification if integer
9756 or we don't honor IEEE floating point NaNs. */
9757 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9758 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9759 break;
9760 /* ... fall through ... */
9761 case GT_EXPR:
9762 case LT_EXPR:
9763 return constant_boolean_node (0, type);
9764 default:
9765 gcc_unreachable ();
9769 /* If we are comparing an expression that just has comparisons
9770 of two integer values, arithmetic expressions of those comparisons,
9771 and constants, we can simplify it. There are only three cases
9772 to check: the two values can either be equal, the first can be
9773 greater, or the second can be greater. Fold the expression for
9774 those three values. Since each value must be 0 or 1, we have
9775 eight possibilities, each of which corresponds to the constant 0
9776 or 1 or one of the six possible comparisons.
9778 This handles common cases like (a > b) == 0 but also handles
9779 expressions like ((x > y) - (y > x)) > 0, which supposedly
9780 occur in macroized code. */
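/* Worked instance: for "(a > b) == 0" the three substitutions below
   evaluate to 0 (a maximal, b minimal), 1 (both maximal) and
   1 (a minimal, b maximal), giving the 3-bit mask 3 and therefore
   LE_EXPR, i.e. "a <= b".  */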
9782 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9784 tree cval1 = 0, cval2 = 0;
9785 int save_p = 0;
9787 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9788 /* Don't handle degenerate cases here; they should already
9789 have been handled anyway. */
9790 && cval1 != 0 && cval2 != 0
9791 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9792 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9793 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9794 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9795 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9796 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9797 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9799 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9800 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9802 /* We can't just pass T to eval_subst in case cval1 or cval2
9803 was the same as ARG1. */
9805 tree high_result
9806 = fold_build2_loc (loc, code, type,
9807 eval_subst (loc, arg0, cval1, maxval,
9808 cval2, minval),
9809 arg1);
9810 tree equal_result
9811 = fold_build2_loc (loc, code, type,
9812 eval_subst (loc, arg0, cval1, maxval,
9813 cval2, maxval),
9814 arg1);
9815 tree low_result
9816 = fold_build2_loc (loc, code, type,
9817 eval_subst (loc, arg0, cval1, minval,
9818 cval2, maxval),
9819 arg1);
9821 /* All three of these results should be 0 or 1. Confirm they are.
9822 Then use those values to select the proper code to use. */
9824 if (TREE_CODE (high_result) == INTEGER_CST
9825 && TREE_CODE (equal_result) == INTEGER_CST
9826 && TREE_CODE (low_result) == INTEGER_CST)
9828 /* Make a 3-bit mask with the high-order bit being the
9829    value for '>', the next for '=', and the low for '<'.  */
9830 switch ((integer_onep (high_result) * 4)
9831 + (integer_onep (equal_result) * 2)
9832 + integer_onep (low_result))
9834 case 0:
9835 /* Always false. */
9836 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9837 case 1:
9838 code = LT_EXPR;
9839 break;
9840 case 2:
9841 code = EQ_EXPR;
9842 break;
9843 case 3:
9844 code = LE_EXPR;
9845 break;
9846 case 4:
9847 code = GT_EXPR;
9848 break;
9849 case 5:
9850 code = NE_EXPR;
9851 break;
9852 case 6:
9853 code = GE_EXPR;
9854 break;
9855 case 7:
9856 /* Always true. */
9857 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9860 if (save_p)
9862 tem = save_expr (build2 (code, type, cval1, cval2));
9863 SET_EXPR_LOCATION (tem, loc);
9864 return tem;
9866 return fold_build2_loc (loc, code, type, cval1, cval2);
9871 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9872 into a single range test. */
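/* E.g. for unsigned x, "x / 4 == 2" holds exactly for
   8 <= x <= 11, and fold_div_compare is expected to build the
   equivalent range test.  */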
9873 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9874 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9875 && TREE_CODE (arg1) == INTEGER_CST
9876 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9877 && !integer_zerop (TREE_OPERAND (arg0, 1))
9878 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9879 && !TREE_OVERFLOW (arg1))
9881 tem = fold_div_compare (loc, code, type, arg0, arg1);
9882 if (tem != NULL_TREE)
9883 return tem;
9886 /* Fold ~X op ~Y as Y op X. */
9887 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9888 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9890 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9891 return fold_build2_loc (loc, code, type,
9892 fold_convert_loc (loc, cmp_type,
9893 TREE_OPERAND (arg1, 0)),
9894 TREE_OPERAND (arg0, 0));
9897 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9898 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9899 && TREE_CODE (arg1) == INTEGER_CST)
9901 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9902 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9903 TREE_OPERAND (arg0, 0),
9904 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9905 fold_convert_loc (loc, cmp_type, arg1)));
9908 return NULL_TREE;
9912 /* Subroutine of fold_binary. Optimize complex multiplications of the
9913 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9914 argument EXPR represents the expression "z" of type TYPE. */
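/* With z = a + b*i we have z * conj(z) = (a + b*i) * (a - b*i)
   = a*a + b*b, so the result below is COMPLEX_EXPR (a*a + b*b, 0).  */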
9916 static tree
9917 fold_mult_zconjz (location_t loc, tree type, tree expr)
9919 tree itype = TREE_TYPE (type);
9920 tree rpart, ipart, tem;
9922 if (TREE_CODE (expr) == COMPLEX_EXPR)
9924 rpart = TREE_OPERAND (expr, 0);
9925 ipart = TREE_OPERAND (expr, 1);
9927 else if (TREE_CODE (expr) == COMPLEX_CST)
9929 rpart = TREE_REALPART (expr);
9930 ipart = TREE_IMAGPART (expr);
9932 else
9934 expr = save_expr (expr);
9935 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9936 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9939 rpart = save_expr (rpart);
9940 ipart = save_expr (ipart);
9941 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9942 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9943 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9944 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9945 fold_convert_loc (loc, itype, integer_zero_node));
9949 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9950 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9951 guarantees that P and N have the same least significant log2(M) bits.
9952 N is not otherwise constrained. In particular, N is not normalized to
9953 0 <= N < M as is common. In general, the precise value of P is unknown.
9954 M is chosen as large as possible such that constant N can be determined.
9956 Returns M and sets *RESIDUE to N.
9958 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9959    account.  This is not always possible due to PR 35705.  */
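/* E.g. for "&a +p i * 12" where a has 16-byte alignment, the
   recursive call on &a yields modulus 16 and residue 0, and the
   multiplier 12 contributes its largest power-of-2 divisor 4, so
   the overall result is modulus 4, residue 0.  */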
9962 static unsigned HOST_WIDE_INT
9963 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9964 bool allow_func_align)
9966 enum tree_code code;
9968 *residue = 0;
9970 code = TREE_CODE (expr);
9971 if (code == ADDR_EXPR)
9973 expr = TREE_OPERAND (expr, 0);
9974 if (handled_component_p (expr))
9976 HOST_WIDE_INT bitsize, bitpos;
9977 tree offset;
9978 enum machine_mode mode;
9979 int unsignedp, volatilep;
9981 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9982 &mode, &unsignedp, &volatilep, false);
9983 *residue = bitpos / BITS_PER_UNIT;
9984 if (offset)
9986 if (TREE_CODE (offset) == INTEGER_CST)
9987 *residue += TREE_INT_CST_LOW (offset);
9988 else
9989 /* We don't handle more complicated offset expressions. */
9990 return 1;
9994 if (DECL_P (expr)
9995 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9996 return DECL_ALIGN_UNIT (expr);
9998 else if (code == POINTER_PLUS_EXPR)
10000 tree op0, op1;
10001 unsigned HOST_WIDE_INT modulus;
10002 enum tree_code inner_code;
10004 op0 = TREE_OPERAND (expr, 0);
10005 STRIP_NOPS (op0);
10006 modulus = get_pointer_modulus_and_residue (op0, residue,
10007 allow_func_align);
10009 op1 = TREE_OPERAND (expr, 1);
10010 STRIP_NOPS (op1);
10011 inner_code = TREE_CODE (op1);
10012 if (inner_code == INTEGER_CST)
10014 *residue += TREE_INT_CST_LOW (op1);
10015 return modulus;
10017 else if (inner_code == MULT_EXPR)
10019 op1 = TREE_OPERAND (op1, 1);
10020 if (TREE_CODE (op1) == INTEGER_CST)
10022 unsigned HOST_WIDE_INT align;
10024 /* Compute the greatest power-of-2 divisor of op1. */
10025 align = TREE_INT_CST_LOW (op1);
10026 align &= -align;
10028    /* If align is non-zero and less than modulus, replace
10029    modulus with align.  If align is 0, then either op1 is 0
10030 or the greatest power-of-2 divisor of op1 doesn't fit in an
10031 unsigned HOST_WIDE_INT. In either case, no additional
10032 constraint is imposed. */
10033 if (align)
10034 modulus = MIN (modulus, align);
10036 return modulus;
10041 /* If we get here, we were unable to determine anything useful about the
10042 expression. */
10043 return 1;
10047 /* Fold a binary expression of code CODE and type TYPE with operands
10048 OP0 and OP1. LOC is the location of the resulting expression.
10049 Return the folded expression if folding is successful. Otherwise,
10050 return NULL_TREE. */
10052 tree
10053 fold_binary_loc (location_t loc,
10054 enum tree_code code, tree type, tree op0, tree op1)
10056 enum tree_code_class kind = TREE_CODE_CLASS (code);
10057 tree arg0, arg1, tem;
10058 tree t1 = NULL_TREE;
10059 bool strict_overflow_p;
10061 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10062 && TREE_CODE_LENGTH (code) == 2
10063 && op0 != NULL_TREE
10064 && op1 != NULL_TREE);
10066 arg0 = op0;
10067 arg1 = op1;
10069 /* Strip any conversions that don't change the mode. This is
10070 safe for every expression, except for a comparison expression
10071 because its signedness is derived from its operands. So, in
10072 the latter case, only strip conversions that don't change the
10073 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10074 preserved.
10076 Note that this is done as an internal manipulation within the
10077 constant folder, in order to find the simplest representation
10078 of the arguments so that their form can be studied. In any
10079 cases, the appropriate type conversions should be put back in
10080 the tree that will get out of the constant folder. */
10082 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10084 STRIP_SIGN_NOPS (arg0);
10085 STRIP_SIGN_NOPS (arg1);
10087 else
10089 STRIP_NOPS (arg0);
10090 STRIP_NOPS (arg1);
10093 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10094 constant but we can't do arithmetic on them. */
10095 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10096 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10097 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10098 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10099 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10100 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
10102 if (kind == tcc_binary)
10104 /* Make sure type and arg0 have the same saturating flag. */
10105 gcc_assert (TYPE_SATURATING (type)
10106 == TYPE_SATURATING (TREE_TYPE (arg0)));
10107 tem = const_binop (code, arg0, arg1, 0);
10109 else if (kind == tcc_comparison)
10110 tem = fold_relational_const (code, type, arg0, arg1);
10111 else
10112 tem = NULL_TREE;
10114 if (tem != NULL_TREE)
10116 if (TREE_TYPE (tem) != type)
10117 tem = fold_convert_loc (loc, type, tem);
10118 return tem;
10122 /* If this is a commutative operation, and ARG0 is a constant, move it
10123 to ARG1 to reduce the number of tests below. */
10124 if (commutative_tree_code (code)
10125 && tree_swap_operands_p (arg0, arg1, true))
10126 return fold_build2_loc (loc, code, type, op1, op0);
10128 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10130 First check for cases where an arithmetic operation is applied to a
10131 compound, conditional, or comparison operation. Push the arithmetic
10132 operation inside the compound or conditional to see if any folding
10133 can then be done. Convert comparison to conditional for this purpose.
10134    This also optimizes non-constant cases that used to be done in
10135 expand_expr.
10137 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10138    where one of the operands is a truth value and the other is a truth
10139    value or a BIT_AND_EXPR with the constant 1.  In that case, the
10140 code below would make the expression more complex. Change it to a
10141 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10142 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
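/* E.g. "(a < b) & (c < d)" becomes a TRUTH_AND_EXPR and
   "(a < b) == (c < d)" becomes the inversion of a TRUTH_XOR_EXPR,
   exposing further logical simplifications.  */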
10144 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10145 || code == EQ_EXPR || code == NE_EXPR)
10146 && ((truth_value_p (TREE_CODE (arg0))
10147 && (truth_value_p (TREE_CODE (arg1))
10148 || (TREE_CODE (arg1) == BIT_AND_EXPR
10149 && integer_onep (TREE_OPERAND (arg1, 1)))))
10150 || (truth_value_p (TREE_CODE (arg1))
10151 && (truth_value_p (TREE_CODE (arg0))
10152 || (TREE_CODE (arg0) == BIT_AND_EXPR
10153 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10155 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10156 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10157 : TRUTH_XOR_EXPR,
10158 boolean_type_node,
10159 fold_convert_loc (loc, boolean_type_node, arg0),
10160 fold_convert_loc (loc, boolean_type_node, arg1));
10162 if (code == EQ_EXPR)
10163 tem = invert_truthvalue_loc (loc, tem);
10165 return fold_convert_loc (loc, type, tem);
10168 if (TREE_CODE_CLASS (code) == tcc_binary
10169 || TREE_CODE_CLASS (code) == tcc_comparison)
10171 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10173 tem = fold_build2_loc (loc, code, type,
10174 fold_convert_loc (loc, TREE_TYPE (op0),
10175 TREE_OPERAND (arg0, 1)), op1);
10176 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10177 goto fold_binary_exit;
10179 if (TREE_CODE (arg1) == COMPOUND_EXPR
10180 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10182 tem = fold_build2_loc (loc, code, type, op0,
10183 fold_convert_loc (loc, TREE_TYPE (op1),
10184 TREE_OPERAND (arg1, 1)));
10185 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10186 goto fold_binary_exit;
10189 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10191 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10192 arg0, arg1,
10193 /*cond_first_p=*/1);
10194 if (tem != NULL_TREE)
10195 return tem;
10198 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10200 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10201 arg1, arg0,
10202 /*cond_first_p=*/0);
10203 if (tem != NULL_TREE)
10204 return tem;
10208 switch (code)
10210 case POINTER_PLUS_EXPR:
10211 /* 0 +p index -> (type)index */
10212 if (integer_zerop (arg0))
10213 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10215 /* PTR +p 0 -> PTR */
10216 if (integer_zerop (arg1))
10217 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10219 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10220 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10221 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10222 return fold_convert_loc (loc, type,
10223 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10224 fold_convert_loc (loc, sizetype,
10225 arg1),
10226 fold_convert_loc (loc, sizetype,
10227 arg0)));
10229 /* index +p PTR -> PTR +p index */
10230 if (POINTER_TYPE_P (TREE_TYPE (arg1))
10231 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10232 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10233 fold_convert_loc (loc, type, arg1),
10234 fold_convert_loc (loc, sizetype, arg0));
10236 /* (PTR +p B) +p A -> PTR +p (B + A) */
10237 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10239 tree inner;
10240 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10241 tree arg00 = TREE_OPERAND (arg0, 0);
10242 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10243 arg01, fold_convert_loc (loc, sizetype, arg1));
10244 return fold_convert_loc (loc, type,
10245 fold_build2_loc (loc, POINTER_PLUS_EXPR,
10246 TREE_TYPE (arg00),
10247 arg00, inner));
10250 /* PTR_CST +p CST -> CST1 */
10251 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10252 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10253 fold_convert_loc (loc, type, arg1));
10255    /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10256    of the array.  The loop optimizer sometimes produces this type of
10257    expression.  */
10258 if (TREE_CODE (arg0) == ADDR_EXPR)
10260 tem = try_move_mult_to_index (loc, arg0,
10261 fold_convert_loc (loc, sizetype, arg1));
10262 if (tem)
10263 return fold_convert_loc (loc, type, tem);
10266 return NULL_TREE;
10268 case PLUS_EXPR:
10269 /* A + (-B) -> A - B */
10270 if (TREE_CODE (arg1) == NEGATE_EXPR)
10271 return fold_build2_loc (loc, MINUS_EXPR, type,
10272 fold_convert_loc (loc, type, arg0),
10273 fold_convert_loc (loc, type,
10274 TREE_OPERAND (arg1, 0)));
10275 /* (-A) + B -> B - A */
10276 if (TREE_CODE (arg0) == NEGATE_EXPR
10277 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10278 return fold_build2_loc (loc, MINUS_EXPR, type,
10279 fold_convert_loc (loc, type, arg1),
10280 fold_convert_loc (loc, type,
10281 TREE_OPERAND (arg0, 0)));
10283 if (INTEGRAL_TYPE_P (type))
10285 /* Convert ~A + 1 to -A. */
10286 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10287 && integer_onep (arg1))
10288 return fold_build1_loc (loc, NEGATE_EXPR, type,
10289 fold_convert_loc (loc, type,
10290 TREE_OPERAND (arg0, 0)));
10292 /* ~X + X is -1. */
10293 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10294 && !TYPE_OVERFLOW_TRAPS (type))
10296 tree tem = TREE_OPERAND (arg0, 0);
10298 STRIP_NOPS (tem);
10299 if (operand_equal_p (tem, arg1, 0))
10301 t1 = build_int_cst_type (type, -1);
10302 return omit_one_operand_loc (loc, type, t1, arg1);
10306 /* X + ~X is -1. */
10307 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10308 && !TYPE_OVERFLOW_TRAPS (type))
10310 tree tem = TREE_OPERAND (arg1, 0);
10312 STRIP_NOPS (tem);
10313 if (operand_equal_p (arg0, tem, 0))
10315 t1 = build_int_cst_type (type, -1);
10316 return omit_one_operand_loc (loc, type, t1, arg0);
10320 /* X + (X / CST) * -CST is X % CST. */
10321 if (TREE_CODE (arg1) == MULT_EXPR
10322 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10323 && operand_equal_p (arg0,
10324 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10326 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10327 tree cst1 = TREE_OPERAND (arg1, 1);
10328 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10329 cst1, cst0);
10330 if (sum && integer_zerop (sum))
10331 return fold_convert_loc (loc, type,
10332 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10333 TREE_TYPE (arg0), arg0,
10334 cst0));
10338 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10339 same or one. Make sure type is not saturating.
10340 fold_plusminus_mult_expr will re-associate. */
10341 if ((TREE_CODE (arg0) == MULT_EXPR
10342 || TREE_CODE (arg1) == MULT_EXPR)
10343 && !TYPE_SATURATING (type)
10344 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10346 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10347 if (tem)
10348 return tem;
10351 if (! FLOAT_TYPE_P (type))
10353 if (integer_zerop (arg1))
10354 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10356 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10357 with a constant, and the two constants have no bits in common,
10358 we should treat this as a BIT_IOR_EXPR since this may produce more
10359 simplifications. */
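/* E.g. "(x & 0xf0) + (y & 0x0f)" can generate no carries and is
   rewritten as "(x & 0xf0) | (y & 0x0f)".  */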
10360 if (TREE_CODE (arg0) == BIT_AND_EXPR
10361 && TREE_CODE (arg1) == BIT_AND_EXPR
10362 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10363 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10364 && integer_zerop (const_binop (BIT_AND_EXPR,
10365 TREE_OPERAND (arg0, 1),
10366 TREE_OPERAND (arg1, 1), 0)))
10368 code = BIT_IOR_EXPR;
10369 goto bit_ior;
10372 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10373 (plus (plus (mult) (mult)) (foo)) so that we can
10374 take advantage of the factoring cases below. */
10375 if (((TREE_CODE (arg0) == PLUS_EXPR
10376 || TREE_CODE (arg0) == MINUS_EXPR)
10377 && TREE_CODE (arg1) == MULT_EXPR)
10378 || ((TREE_CODE (arg1) == PLUS_EXPR
10379 || TREE_CODE (arg1) == MINUS_EXPR)
10380 && TREE_CODE (arg0) == MULT_EXPR))
10382 tree parg0, parg1, parg, marg;
10383 enum tree_code pcode;
10385 if (TREE_CODE (arg1) == MULT_EXPR)
10386 parg = arg0, marg = arg1;
10387 else
10388 parg = arg1, marg = arg0;
10389 pcode = TREE_CODE (parg);
10390 parg0 = TREE_OPERAND (parg, 0);
10391 parg1 = TREE_OPERAND (parg, 1);
10392 STRIP_NOPS (parg0);
10393 STRIP_NOPS (parg1);
10395 if (TREE_CODE (parg0) == MULT_EXPR
10396 && TREE_CODE (parg1) != MULT_EXPR)
10397 return fold_build2_loc (loc, pcode, type,
10398 fold_build2_loc (loc, PLUS_EXPR, type,
10399 fold_convert_loc (loc, type,
10400 parg0),
10401 fold_convert_loc (loc, type,
10402 marg)),
10403 fold_convert_loc (loc, type, parg1));
10404 if (TREE_CODE (parg0) != MULT_EXPR
10405 && TREE_CODE (parg1) == MULT_EXPR)
10406 return
10407 fold_build2_loc (loc, PLUS_EXPR, type,
10408 fold_convert_loc (loc, type, parg0),
10409 fold_build2_loc (loc, pcode, type,
10410 fold_convert_loc (loc, type, marg),
10411 fold_convert_loc (loc, type,
10412 parg1)));
10415 else
10417 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10418 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10419 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10421 /* Likewise if the operands are reversed. */
10422 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10423 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10425 /* Convert X + -C into X - C. */
10426 if (TREE_CODE (arg1) == REAL_CST
10427 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10429 tem = fold_negate_const (arg1, type);
10430 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10431 return fold_build2_loc (loc, MINUS_EXPR, type,
10432 fold_convert_loc (loc, type, arg0),
10433 fold_convert_loc (loc, type, tem));
10436 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10437 to __complex__ ( x, y ). This is not the same for SNaNs or
10438 if signed zeros are involved. */
10439 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10440 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10441 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10443 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10444 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10445 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10446 bool arg0rz = false, arg0iz = false;
10447 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10448 || (arg0i && (arg0iz = real_zerop (arg0i))))
10450 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10451 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10452 if (arg0rz && arg1i && real_zerop (arg1i))
10454 tree rp = arg1r ? arg1r
10455 : build1 (REALPART_EXPR, rtype, arg1);
10456 tree ip = arg0i ? arg0i
10457 : build1 (IMAGPART_EXPR, rtype, arg0);
10458 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10460 else if (arg0iz && arg1r && real_zerop (arg1r))
10462 tree rp = arg0r ? arg0r
10463 : build1 (REALPART_EXPR, rtype, arg0);
10464 tree ip = arg1i ? arg1i
10465 : build1 (IMAGPART_EXPR, rtype, arg1);
10466 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10471 if (flag_unsafe_math_optimizations
10472 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10473 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10474 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10475 return tem;
10477 /* Convert x+x into x*2.0. */
10478 if (operand_equal_p (arg0, arg1, 0)
10479 && SCALAR_FLOAT_TYPE_P (type))
10480 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10481 build_real (type, dconst2));
10483 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10484 We associate floats only if the user has specified
10485 -fassociative-math. */
10486 if (flag_associative_math
10487 && TREE_CODE (arg1) == PLUS_EXPR
10488 && TREE_CODE (arg0) != MULT_EXPR)
10490 tree tree10 = TREE_OPERAND (arg1, 0);
10491 tree tree11 = TREE_OPERAND (arg1, 1);
10492 if (TREE_CODE (tree11) == MULT_EXPR
10493 && TREE_CODE (tree10) == MULT_EXPR)
10495 tree tree0;
10496 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10497 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10500 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10501 We associate floats only if the user has specified
10502 -fassociative-math. */
10503 if (flag_associative_math
10504 && TREE_CODE (arg0) == PLUS_EXPR
10505 && TREE_CODE (arg1) != MULT_EXPR)
10507 tree tree00 = TREE_OPERAND (arg0, 0);
10508 tree tree01 = TREE_OPERAND (arg0, 1);
10509 if (TREE_CODE (tree01) == MULT_EXPR
10510 && TREE_CODE (tree00) == MULT_EXPR)
10512 tree tree0;
10513 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10514 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10519 bit_rotate:
10520 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10521 is a rotate of A by C1 bits. */
10522 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10523 is a rotate of A by B bits. */
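/* E.g. for unsigned 32-bit x, "(x << 3) + (x >> 29)" becomes an
   LROTATE_EXPR of x by 3, and "(x << n) + (x >> (32 - n))" becomes
   a rotate by n.  */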
10525 enum tree_code code0, code1;
10526 tree rtype;
10527 code0 = TREE_CODE (arg0);
10528 code1 = TREE_CODE (arg1);
10529 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10530 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10531 && operand_equal_p (TREE_OPERAND (arg0, 0),
10532 TREE_OPERAND (arg1, 0), 0)
10533 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10534 TYPE_UNSIGNED (rtype))
10535 /* Only create rotates in complete modes. Other cases are not
10536 expanded properly. */
10537 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10539 tree tree01, tree11;
10540 enum tree_code code01, code11;
10542 tree01 = TREE_OPERAND (arg0, 1);
10543 tree11 = TREE_OPERAND (arg1, 1);
10544 STRIP_NOPS (tree01);
10545 STRIP_NOPS (tree11);
10546 code01 = TREE_CODE (tree01);
10547 code11 = TREE_CODE (tree11);
10548 if (code01 == INTEGER_CST
10549 && code11 == INTEGER_CST
10550 && TREE_INT_CST_HIGH (tree01) == 0
10551 && TREE_INT_CST_HIGH (tree11) == 0
10552 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10553 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10555 tem = build2 (LROTATE_EXPR,
10556 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10557 TREE_OPERAND (arg0, 0),
10558 code0 == LSHIFT_EXPR
10559 ? tree01 : tree11);
10560 SET_EXPR_LOCATION (tem, loc);
10561 return fold_convert_loc (loc, type, tem);
10563 else if (code11 == MINUS_EXPR)
10565 tree tree110, tree111;
10566 tree110 = TREE_OPERAND (tree11, 0);
10567 tree111 = TREE_OPERAND (tree11, 1);
10568 STRIP_NOPS (tree110);
10569 STRIP_NOPS (tree111);
10570 if (TREE_CODE (tree110) == INTEGER_CST
10571 && 0 == compare_tree_int (tree110,
10572 TYPE_PRECISION
10573 (TREE_TYPE (TREE_OPERAND
10574 (arg0, 0))))
10575 && operand_equal_p (tree01, tree111, 0))
10576 return
10577 fold_convert_loc (loc, type,
10578 build2 ((code0 == LSHIFT_EXPR
10579 ? LROTATE_EXPR
10580 : RROTATE_EXPR),
10581 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10582 TREE_OPERAND (arg0, 0), tree01));
10584 else if (code01 == MINUS_EXPR)
10586 tree tree010, tree011;
10587 tree010 = TREE_OPERAND (tree01, 0);
10588 tree011 = TREE_OPERAND (tree01, 1);
10589 STRIP_NOPS (tree010);
10590 STRIP_NOPS (tree011);
10591 if (TREE_CODE (tree010) == INTEGER_CST
10592 && 0 == compare_tree_int (tree010,
10593 TYPE_PRECISION
10594 (TREE_TYPE (TREE_OPERAND
10595 (arg0, 0))))
10596 && operand_equal_p (tree11, tree011, 0))
10597 return fold_convert_loc
10598 (loc, type,
10599 build2 ((code0 != LSHIFT_EXPR
10600 ? LROTATE_EXPR
10601 : RROTATE_EXPR),
10602 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10603 TREE_OPERAND (arg0, 0), tree11));
10608 associate:
10609    /* In most languages, we can't associate operations on floats through
10610 parentheses. Rather than remember where the parentheses were, we
10611 don't associate floats at all, unless the user has specified
10612 -fassociative-math.
10613 And, we need to make sure type is not saturating. */
10615 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10616 && !TYPE_SATURATING (type))
10618 tree var0, con0, lit0, minus_lit0;
10619 tree var1, con1, lit1, minus_lit1;
10620 bool ok = true;
10622 /* Split both trees into variables, constants, and literals. Then
10623 associate each group together, the constants with literals,
10624 then the result with variables. This increases the chances of
10625 literals being recombined later and of generating relocatable
10626 expressions for the sum of a constant and literal. */
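/* E.g. for unsigned x and y, "(x + 5) + (y + 3)" splits into
   variables x, y and literals 5, 3, and reassociates to
   "(x + y) + 8".  */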
10627 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10628 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10629 code == MINUS_EXPR);
10631 /* With undefined overflow we can only associate constants
10632 with one variable. */
10633 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10634 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10635 && var0 && var1)
10637 tree tmp0 = var0;
10638 tree tmp1 = var1;
10640 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10641 tmp0 = TREE_OPERAND (tmp0, 0);
10642 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10643 tmp1 = TREE_OPERAND (tmp1, 0);
10644 /* The only case we can still associate with two variables
10645 is if they are the same, modulo negation. */
10646 if (!operand_equal_p (tmp0, tmp1, 0))
10647 ok = false;
10650 /* Only do something if we found more than two objects. Otherwise,
10651 nothing has changed and we risk infinite recursion. */
10652 if (ok
10653 && (2 < ((var0 != 0) + (var1 != 0)
10654 + (con0 != 0) + (con1 != 0)
10655 + (lit0 != 0) + (lit1 != 0)
10656 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10658 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10659 if (code == MINUS_EXPR)
10660 code = PLUS_EXPR;
10662 var0 = associate_trees (loc, var0, var1, code, type);
10663 con0 = associate_trees (loc, con0, con1, code, type);
10664 lit0 = associate_trees (loc, lit0, lit1, code, type);
10665 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10667 /* Preserve the MINUS_EXPR if the negative part of the literal is
10668 greater than the positive part. Otherwise, the multiplicative
10669    folding code (i.e. extract_muldiv) may be fooled in case
10670 unsigned constants are subtracted, like in the following
10671 example: ((X*2 + 4) - 8U)/2. */
10672 if (minus_lit0 && lit0)
10674 if (TREE_CODE (lit0) == INTEGER_CST
10675 && TREE_CODE (minus_lit0) == INTEGER_CST
10676 && tree_int_cst_lt (lit0, minus_lit0))
10678 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10679 MINUS_EXPR, type);
10680 lit0 = 0;
10682 else
10684 lit0 = associate_trees (loc, lit0, minus_lit0,
10685 MINUS_EXPR, type);
10686 minus_lit0 = 0;
10689 if (minus_lit0)
10691 if (con0 == 0)
10692 return
10693 fold_convert_loc (loc, type,
10694 associate_trees (loc, var0, minus_lit0,
10695 MINUS_EXPR, type));
10696 else
10698 con0 = associate_trees (loc, con0, minus_lit0,
10699 MINUS_EXPR, type);
10700 return
10701 fold_convert_loc (loc, type,
10702 associate_trees (loc, var0, con0,
10703 PLUS_EXPR, type));
10707 con0 = associate_trees (loc, con0, lit0, code, type);
10708 return
10709 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10710 code, type));
10714 return NULL_TREE;
10716 case MINUS_EXPR:
10717 /* Pointer simplifications for subtraction, simple reassociations. */
10718 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10720 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10721 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10722 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10724 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10725 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10726 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10727 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10728 return fold_build2_loc (loc, PLUS_EXPR, type,
10729 fold_build2_loc (loc, MINUS_EXPR, type,
10730 arg00, arg10),
10731 fold_build2_loc (loc, MINUS_EXPR, type,
10732 arg01, arg11));
10734 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10735 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10737 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10738 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10739 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10740 fold_convert_loc (loc, type, arg1));
10741 if (tmp)
10742 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10745 /* A - (-B) -> A + B */
10746 if (TREE_CODE (arg1) == NEGATE_EXPR)
10747 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10748 fold_convert_loc (loc, type,
10749 TREE_OPERAND (arg1, 0)));
10750 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10751 if (TREE_CODE (arg0) == NEGATE_EXPR
10752 && (FLOAT_TYPE_P (type)
10753 || INTEGRAL_TYPE_P (type))
10754 && negate_expr_p (arg1)
10755 && reorder_operands_p (arg0, arg1))
10756 return fold_build2_loc (loc, MINUS_EXPR, type,
10757 fold_convert_loc (loc, type,
10758 negate_expr (arg1)),
10759 fold_convert_loc (loc, type,
10760 TREE_OPERAND (arg0, 0)));
10761 /* Convert -A - 1 to ~A. */
10762 if (INTEGRAL_TYPE_P (type)
10763 && TREE_CODE (arg0) == NEGATE_EXPR
10764 && integer_onep (arg1)
10765 && !TYPE_OVERFLOW_TRAPS (type))
10766 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10767 fold_convert_loc (loc, type,
10768 TREE_OPERAND (arg0, 0)));
10770 /* Convert -1 - A to ~A. */
10771 if (INTEGRAL_TYPE_P (type)
10772 && integer_all_onesp (arg0))
10773 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10776 /* X - (X / CST) * CST is X % CST. */
10777 if (INTEGRAL_TYPE_P (type)
10778 && TREE_CODE (arg1) == MULT_EXPR
10779 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10780 && operand_equal_p (arg0,
10781 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10782 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10783 TREE_OPERAND (arg1, 1), 0))
10784 return
10785 fold_convert_loc (loc, type,
10786 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10787 arg0, TREE_OPERAND (arg1, 1)));
10789 if (! FLOAT_TYPE_P (type))
10791 if (integer_zerop (arg0))
10792 return negate_expr (fold_convert_loc (loc, type, arg1));
10793 if (integer_zerop (arg1))
10794 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10796 /* Fold A - (A & B) into ~B & A. */
10797 if (!TREE_SIDE_EFFECTS (arg0)
10798 && TREE_CODE (arg1) == BIT_AND_EXPR)
10800 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10802 tree arg10 = fold_convert_loc (loc, type,
10803 TREE_OPERAND (arg1, 0));
10804 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10805 fold_build1_loc (loc, BIT_NOT_EXPR,
10806 type, arg10),
10807 fold_convert_loc (loc, type, arg0));
10809 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10811 tree arg11 = fold_convert_loc (loc,
10812 type, TREE_OPERAND (arg1, 1));
10813 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10814 fold_build1_loc (loc, BIT_NOT_EXPR,
10815 type, arg11),
10816 fold_convert_loc (loc, type, arg0));
10820 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10821 any power of 2 minus 1. */
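      /* E.g. with A == 25 and B == 15: (25 & ~15) - (25 & 15) == 16 - 9 == 7,
         and (25 ^ 15) - 15 == 22 - 15 == 7.  */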
10822 if (TREE_CODE (arg0) == BIT_AND_EXPR
10823 && TREE_CODE (arg1) == BIT_AND_EXPR
10824 && operand_equal_p (TREE_OPERAND (arg0, 0),
10825 TREE_OPERAND (arg1, 0), 0))
10827 tree mask0 = TREE_OPERAND (arg0, 1);
10828 tree mask1 = TREE_OPERAND (arg1, 1);
10829 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10831 if (operand_equal_p (tem, mask1, 0))
10833 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10834 TREE_OPERAND (arg0, 0), mask1);
10835 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10840 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10841 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10842 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10844 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10845 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10846 (-ARG1 + ARG0) reduces to -ARG1. */
10847 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10848 return negate_expr (fold_convert_loc (loc, type, arg1));
10850 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10851 __complex__ ( x, -y ). This is not the same for SNaNs or if
10852 signed zeros are involved. */
10853 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10854 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10855 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10857 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10858 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10859 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10860 bool arg0rz = false, arg0iz = false;
10861 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10862 || (arg0i && (arg0iz = real_zerop (arg0i))))
10864 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10865 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10866 if (arg0rz && arg1i && real_zerop (arg1i))
10868 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10869 arg1r ? arg1r
10870 : build1 (REALPART_EXPR, rtype, arg1));
10871 tree ip = arg0i ? arg0i
10872 : build1 (IMAGPART_EXPR, rtype, arg0);
10873 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10875 else if (arg0iz && arg1r && real_zerop (arg1r))
10877 tree rp = arg0r ? arg0r
10878 : build1 (REALPART_EXPR, rtype, arg0);
10879 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10880 arg1i ? arg1i
10881 : build1 (IMAGPART_EXPR, rtype, arg1));
10882 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10887 /* Fold &x - &x. This can happen from &x.foo - &x.
10888 This is unsafe for certain floats even in non-IEEE formats.
10889 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10890 Also note that operand_equal_p is always false if an operand
10891 is volatile. */
10893 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10894 && operand_equal_p (arg0, arg1, 0))
10895 return fold_convert_loc (loc, type, integer_zero_node);
10897 /* A - B -> A + (-B) if B is easily negatable. */
10898 if (negate_expr_p (arg1)
10899 && ((FLOAT_TYPE_P (type)
10900 /* Avoid this transformation if B is a positive REAL_CST. */
10901 && (TREE_CODE (arg1) != REAL_CST
10902 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10903 || INTEGRAL_TYPE_P (type)))
10904 return fold_build2_loc (loc, PLUS_EXPR, type,
10905 fold_convert_loc (loc, type, arg0),
10906 fold_convert_loc (loc, type,
10907 negate_expr (arg1)));
10909 /* Try folding difference of addresses. */
10911 HOST_WIDE_INT diff;
10913 if ((TREE_CODE (arg0) == ADDR_EXPR
10914 || TREE_CODE (arg1) == ADDR_EXPR)
10915 && ptr_difference_const (arg0, arg1, &diff))
10916 return build_int_cst_type (type, diff);
10919 /* Fold &a[i] - &a[j] to (i-j) * element_size. */
10920 if (TREE_CODE (arg0) == ADDR_EXPR
10921 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10922 && TREE_CODE (arg1) == ADDR_EXPR
10923 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10925 tree aref0 = TREE_OPERAND (arg0, 0);
10926 tree aref1 = TREE_OPERAND (arg1, 0);
10927 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10928 TREE_OPERAND (aref1, 0), 0))
10930 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10931 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10932 tree esz = array_ref_element_size (aref0);
10933 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10934 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10935 fold_convert_loc (loc, type, esz));
10940 if (FLOAT_TYPE_P (type)
10941 && flag_unsafe_math_optimizations
10942 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10943 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10944 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10945 return tem;
10947 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10948 same or one. Make sure type is not saturating.
10949 fold_plusminus_mult_expr will re-associate. */
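      /* E.g. x*3 - x*2 becomes x * (3 - 2), i.e. x.  */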
10950 if ((TREE_CODE (arg0) == MULT_EXPR
10951 || TREE_CODE (arg1) == MULT_EXPR)
10952 && !TYPE_SATURATING (type)
10953 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10955 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10956 if (tem)
10957 return tem;
10960 goto associate;
10962 case MULT_EXPR:
10963 /* (-A) * (-B) -> A * B */
10964 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10965 return fold_build2_loc (loc, MULT_EXPR, type,
10966 fold_convert_loc (loc, type,
10967 TREE_OPERAND (arg0, 0)),
10968 fold_convert_loc (loc, type,
10969 negate_expr (arg1)));
10970 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10971 return fold_build2_loc (loc, MULT_EXPR, type,
10972 fold_convert_loc (loc, type,
10973 negate_expr (arg0)),
10974 fold_convert_loc (loc, type,
10975 TREE_OPERAND (arg1, 0)));
10977 if (! FLOAT_TYPE_P (type))
10979 if (integer_zerop (arg1))
10980 return omit_one_operand_loc (loc, type, arg1, arg0);
10981 if (integer_onep (arg1))
10982 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10983 /* Transform x * -1 into -x. Make sure to do the negation
10984 on the original operand with conversions not stripped
10985 because we can only strip non-sign-changing conversions. */
10986 if (integer_all_onesp (arg1))
10987 return fold_convert_loc (loc, type, negate_expr (op0));
10988 /* Transform x * -C into -x * C if x is easily negatable. */
10989 if (TREE_CODE (arg1) == INTEGER_CST
10990 && tree_int_cst_sgn (arg1) == -1
10991 && negate_expr_p (arg0)
10992 && (tem = negate_expr (arg1)) != arg1
10993 && !TREE_OVERFLOW (tem))
10994 return fold_build2_loc (loc, MULT_EXPR, type,
10995 fold_convert_loc (loc, type,
10996 negate_expr (arg0)),
10997 tem);
10999 /* (a * (1 << b)) is (a << b). */
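      /* E.g. a * (1 << 3), i.e. a * 8, folds to a << 3.  */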
11000 if (TREE_CODE (arg1) == LSHIFT_EXPR
11001 && integer_onep (TREE_OPERAND (arg1, 0)))
11002 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11003 TREE_OPERAND (arg1, 1));
11004 if (TREE_CODE (arg0) == LSHIFT_EXPR
11005 && integer_onep (TREE_OPERAND (arg0, 0)))
11006 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11007 TREE_OPERAND (arg0, 1));
11009 /* (A + A) * C -> A * 2 * C */
11010 if (TREE_CODE (arg0) == PLUS_EXPR
11011 && TREE_CODE (arg1) == INTEGER_CST
11012 && operand_equal_p (TREE_OPERAND (arg0, 0),
11013 TREE_OPERAND (arg0, 1), 0))
11014 return fold_build2_loc (loc, MULT_EXPR, type,
11015 omit_one_operand_loc (loc, type,
11016 TREE_OPERAND (arg0, 0),
11017 TREE_OPERAND (arg0, 1)),
11018 fold_build2_loc (loc, MULT_EXPR, type,
11019 build_int_cst (type, 2), arg1));
11021 strict_overflow_p = false;
11022 if (TREE_CODE (arg1) == INTEGER_CST
11023 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11024 &strict_overflow_p)))
11026 if (strict_overflow_p)
11027 fold_overflow_warning (("assuming signed overflow does not "
11028 "occur when simplifying "
11029 "multiplication"),
11030 WARN_STRICT_OVERFLOW_MISC);
11031 return fold_convert_loc (loc, type, tem);
11034 /* Optimize z * conj(z) for integer complex numbers. */
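      /* For z == a + bi, z * conj(z) == a*a + b*b, a purely real value.  */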
11035 if (TREE_CODE (arg0) == CONJ_EXPR
11036 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11037 return fold_mult_zconjz (loc, type, arg1);
11038 if (TREE_CODE (arg1) == CONJ_EXPR
11039 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11040 return fold_mult_zconjz (loc, type, arg0);
11042 else
11044 /* Maybe fold x * 0 to 0. The expressions aren't the same
11045 when x is NaN, since x * 0 is also NaN. Nor are they the
11046 same in modes with signed zeros, since multiplying a
11047 negative value by 0 gives -0, not +0. */
11048 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11049 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11050 && real_zerop (arg1))
11051 return omit_one_operand_loc (loc, type, arg1, arg0);
11052 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11053 Likewise for complex arithmetic with signed zeros. */
11054 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11055 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11056 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11057 && real_onep (arg1))
11058 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11060 /* Transform x * -1.0 into -x. */
11061 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11062 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11063 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11064 && real_minus_onep (arg1))
11065 return fold_convert_loc (loc, type, negate_expr (arg0));
11067 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11068 the result for floating point types due to rounding, so it is applied
11069 only if -fassociative-math is specified. */
11070 if (flag_associative_math
11071 && TREE_CODE (arg0) == RDIV_EXPR
11072 && TREE_CODE (arg1) == REAL_CST
11073 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11075 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11076 arg1, 0);
11077 if (tem)
11078 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11079 TREE_OPERAND (arg0, 1));
11082 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11083 if (operand_equal_p (arg0, arg1, 0))
11085 tree tem = fold_strip_sign_ops (arg0);
11086 if (tem != NULL_TREE)
11088 tem = fold_convert_loc (loc, type, tem);
11089 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11093 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11094 This is not the same for NaNs or if signed zeros are
11095 involved. */
11096 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11097 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11098 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11099 && TREE_CODE (arg1) == COMPLEX_CST
11100 && real_zerop (TREE_REALPART (arg1)))
11102 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11103 if (real_onep (TREE_IMAGPART (arg1)))
11104 return
11105 fold_build2_loc (loc, COMPLEX_EXPR, type,
11106 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11107 rtype, arg0)),
11108 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11109 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11110 return
11111 fold_build2_loc (loc, COMPLEX_EXPR, type,
11112 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11113 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11114 rtype, arg0)));
11117 /* Optimize z * conj(z) for floating point complex numbers.
11118 Guarded by flag_unsafe_math_optimizations as non-finite
11119 imaginary components don't produce scalar results. */
11120 if (flag_unsafe_math_optimizations
11121 && TREE_CODE (arg0) == CONJ_EXPR
11122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11123 return fold_mult_zconjz (loc, type, arg1);
11124 if (flag_unsafe_math_optimizations
11125 && TREE_CODE (arg1) == CONJ_EXPR
11126 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11127 return fold_mult_zconjz (loc, type, arg0);
11129 if (flag_unsafe_math_optimizations)
11131 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11132 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11134 /* Optimizations of root(...)*root(...). */
11135 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11137 tree rootfn, arg;
11138 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11139 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11141 /* Optimize sqrt(x)*sqrt(x) as x. */
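      /* This relies on flag_unsafe_math_optimizations: for negative x,
         sqrt(x) * sqrt(x) evaluates to NaN rather than x.  */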
11142 if (BUILTIN_SQRT_P (fcode0)
11143 && operand_equal_p (arg00, arg10, 0)
11144 && ! HONOR_SNANS (TYPE_MODE (type)))
11145 return arg00;
11147 /* Optimize root(x)*root(y) as root(x*y). */
11148 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11149 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11150 return build_call_expr_loc (loc, rootfn, 1, arg);
11153 /* Optimize expN(x)*expN(y) as expN(x+y). */
11154 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11156 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11157 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11158 CALL_EXPR_ARG (arg0, 0),
11159 CALL_EXPR_ARG (arg1, 0));
11160 return build_call_expr_loc (loc, expfn, 1, arg);
11163 /* Optimizations of pow(...)*pow(...). */
11164 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11165 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11166 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11168 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11169 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11170 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11171 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11173 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11174 if (operand_equal_p (arg01, arg11, 0))
11176 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11177 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11178 arg00, arg10);
11179 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11182 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11183 if (operand_equal_p (arg00, arg10, 0))
11185 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11186 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11187 arg01, arg11);
11188 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11192 /* Optimize tan(x)*cos(x) as sin(x). */
11193 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11194 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11195 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11196 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11197 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11198 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11199 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11200 CALL_EXPR_ARG (arg1, 0), 0))
11202 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11204 if (sinfn != NULL_TREE)
11205 return build_call_expr_loc (loc, sinfn, 1,
11206 CALL_EXPR_ARG (arg0, 0));
11209 /* Optimize x*pow(x,c) as pow(x,c+1). */
11210 if (fcode1 == BUILT_IN_POW
11211 || fcode1 == BUILT_IN_POWF
11212 || fcode1 == BUILT_IN_POWL)
11214 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11215 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11216 if (TREE_CODE (arg11) == REAL_CST
11217 && !TREE_OVERFLOW (arg11)
11218 && operand_equal_p (arg0, arg10, 0))
11220 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11221 REAL_VALUE_TYPE c;
11222 tree arg;
11224 c = TREE_REAL_CST (arg11);
11225 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11226 arg = build_real (type, c);
11227 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11231 /* Optimize pow(x,c)*x as pow(x,c+1). */
11232 if (fcode0 == BUILT_IN_POW
11233 || fcode0 == BUILT_IN_POWF
11234 || fcode0 == BUILT_IN_POWL)
11236 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11237 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11238 if (TREE_CODE (arg01) == REAL_CST
11239 && !TREE_OVERFLOW (arg01)
11240 && operand_equal_p (arg1, arg00, 0))
11242 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11243 REAL_VALUE_TYPE c;
11244 tree arg;
11246 c = TREE_REAL_CST (arg01);
11247 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11248 arg = build_real (type, c);
11249 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11253 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
11254 if (optimize_function_for_speed_p (cfun)
11255 && operand_equal_p (arg0, arg1, 0))
11257 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11259 if (powfn)
11261 tree arg = build_real (type, dconst2);
11262 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11267 goto associate;
11269 case BIT_IOR_EXPR:
11270 bit_ior:
11271 if (integer_all_onesp (arg1))
11272 return omit_one_operand_loc (loc, type, arg1, arg0);
11273 if (integer_zerop (arg1))
11274 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11275 if (operand_equal_p (arg0, arg1, 0))
11276 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11278 /* ~X | X is -1. */
11279 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11280 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11282 t1 = fold_convert_loc (loc, type, integer_zero_node);
11283 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11284 return omit_one_operand_loc (loc, type, t1, arg1);
11287 /* X | ~X is -1. */
11288 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11289 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11291 t1 = fold_convert_loc (loc, type, integer_zero_node);
11292 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11293 return omit_one_operand_loc (loc, type, t1, arg0);
11296 /* Canonicalize (X & C1) | C2. */
11297 if (TREE_CODE (arg0) == BIT_AND_EXPR
11298 && TREE_CODE (arg1) == INTEGER_CST
11299 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11301 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
11302 int width = TYPE_PRECISION (type), w;
11303 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
11304 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11305 hi2 = TREE_INT_CST_HIGH (arg1);
11306 lo2 = TREE_INT_CST_LOW (arg1);
11308 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
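      /* E.g. (X & 0x0f) | 0xff is always 0xff.  */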
11309 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
11310 return omit_one_operand_loc (loc, type, arg1,
11311 TREE_OPERAND (arg0, 0));
11313 if (width > HOST_BITS_PER_WIDE_INT)
11315 mhi = (unsigned HOST_WIDE_INT) -1
11316 >> (2 * HOST_BITS_PER_WIDE_INT - width);
11317 mlo = -1;
11319 else
11321 mhi = 0;
11322 mlo = (unsigned HOST_WIDE_INT) -1
11323 >> (HOST_BITS_PER_WIDE_INT - width);
11326 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
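      /* E.g. (X & ~0xff) | 0xff becomes X | 0xff.  */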
11327 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
11328 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11329 TREE_OPERAND (arg0, 0), arg1);
11331 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11332 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11333 mode which allows further optimizations. */
11334 hi1 &= mhi;
11335 lo1 &= mlo;
11336 hi2 &= mhi;
11337 lo2 &= mlo;
11338 hi3 = hi1 & ~hi2;
11339 lo3 = lo1 & ~lo2;
11340 for (w = BITS_PER_UNIT;
11341 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11342 w <<= 1)
11344 unsigned HOST_WIDE_INT mask
11345 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11346 if (((lo1 | lo2) & mask) == mask
11347 && (lo1 & ~mask) == 0 && hi1 == 0)
11349 hi3 = 0;
11350 lo3 = mask;
11351 break;
11354 if (hi3 != hi1 || lo3 != lo1)
11355 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11356 fold_build2_loc (loc, BIT_AND_EXPR, type,
11357 TREE_OPERAND (arg0, 0),
11358 build_int_cst_wide (type,
11359 lo3, hi3)),
11360 arg1);
11363 /* (X & Y) | Y is (X, Y). */
11364 if (TREE_CODE (arg0) == BIT_AND_EXPR
11365 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11366 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11367 /* (X & Y) | X is (Y, X). */
11368 if (TREE_CODE (arg0) == BIT_AND_EXPR
11369 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11370 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11371 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11372 /* X | (X & Y) is (Y, X). */
11373 if (TREE_CODE (arg1) == BIT_AND_EXPR
11374 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11375 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11376 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11377 /* X | (Y & X) is (Y, X). */
11378 if (TREE_CODE (arg1) == BIT_AND_EXPR
11379 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11380 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11381 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11383 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11384 if (t1 != NULL_TREE)
11385 return t1;
11387 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11389 This results in more efficient code for machines without a NAND
11390 instruction. Combine will canonicalize to the first form
11391 which will allow use of NAND instructions provided by the
11392 backend if they exist. */
11393 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11394 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11396 return
11397 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11398 build2 (BIT_AND_EXPR, type,
11399 fold_convert_loc (loc, type,
11400 TREE_OPERAND (arg0, 0)),
11401 fold_convert_loc (loc, type,
11402 TREE_OPERAND (arg1, 0))));
11405 /* See if this can be simplified into a rotate first. If that
11406 is unsuccessful continue in the association code. */
11407 goto bit_rotate;
11409 case BIT_XOR_EXPR:
11410 if (integer_zerop (arg1))
11411 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11412 if (integer_all_onesp (arg1))
11413 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11414 if (operand_equal_p (arg0, arg1, 0))
11415 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11417 /* ~X ^ X is -1. */
11418 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11419 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11421 t1 = fold_convert_loc (loc, type, integer_zero_node);
11422 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11423 return omit_one_operand_loc (loc, type, t1, arg1);
11426 /* X ^ ~X is -1. */
11427 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11428 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11430 t1 = fold_convert_loc (loc, type, integer_zero_node);
11431 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11432 return omit_one_operand_loc (loc, type, t1, arg0);
11435 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11436 with a constant, and the two constants have no bits in common,
11437 we should treat this as a BIT_IOR_EXPR since this may produce more
11438 simplifications. */
11439 if (TREE_CODE (arg0) == BIT_AND_EXPR
11440 && TREE_CODE (arg1) == BIT_AND_EXPR
11441 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11442 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11443 && integer_zerop (const_binop (BIT_AND_EXPR,
11444 TREE_OPERAND (arg0, 1),
11445 TREE_OPERAND (arg1, 1), 0)))
11447 code = BIT_IOR_EXPR;
11448 goto bit_ior;
11451 /* (X | Y) ^ X -> Y & ~X. */
11452 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11453 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11455 tree t2 = TREE_OPERAND (arg0, 1);
11456 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11457 arg1);
11458 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11459 fold_convert_loc (loc, type, t2),
11460 fold_convert_loc (loc, type, t1));
11461 return t1;
11464 /* (Y | X) ^ X -> Y & ~X. */
11465 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11466 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11468 tree t2 = TREE_OPERAND (arg0, 0);
11469 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11470 arg1);
11471 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11472 fold_convert_loc (loc, type, t2),
11473 fold_convert_loc (loc, type, t1));
11474 return t1;
11477 /* X ^ (X | Y) -> Y & ~X. */
11478 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11479 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11481 tree t2 = TREE_OPERAND (arg1, 1);
11482 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11483 arg0);
11484 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11485 fold_convert_loc (loc, type, t2),
11486 fold_convert_loc (loc, type, t1));
11487 return t1;
11490 /* X ^ (Y | X) -> Y & ~X. */
11491 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11492 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11494 tree t2 = TREE_OPERAND (arg1, 0);
11495 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11496 arg0);
11497 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11498 fold_convert_loc (loc, type, t2),
11499 fold_convert_loc (loc, type, t1));
11500 return t1;
11503 /* Convert ~X ^ ~Y to X ^ Y. */
11504 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11505 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11506 return fold_build2_loc (loc, code, type,
11507 fold_convert_loc (loc, type,
11508 TREE_OPERAND (arg0, 0)),
11509 fold_convert_loc (loc, type,
11510 TREE_OPERAND (arg1, 0)));
11512 /* Convert ~X ^ C to X ^ ~C. */
11513 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11514 && TREE_CODE (arg1) == INTEGER_CST)
11515 return fold_build2_loc (loc, code, type,
11516 fold_convert_loc (loc, type,
11517 TREE_OPERAND (arg0, 0)),
11518 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11520 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11521 if (TREE_CODE (arg0) == BIT_AND_EXPR
11522 && integer_onep (TREE_OPERAND (arg0, 1))
11523 && integer_onep (arg1))
11524 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11525 build_int_cst (TREE_TYPE (arg0), 0));
11527 /* Fold (X & Y) ^ Y as ~X & Y. */
11528 if (TREE_CODE (arg0) == BIT_AND_EXPR
11529 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11531 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11532 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11533 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11534 fold_convert_loc (loc, type, arg1));
11536 /* Fold (X & Y) ^ X as ~Y & X. */
11537 if (TREE_CODE (arg0) == BIT_AND_EXPR
11538 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11539 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11541 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11542 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11543 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11544 fold_convert_loc (loc, type, arg1));
11546 /* Fold X ^ (X & Y) as X & ~Y. */
11547 if (TREE_CODE (arg1) == BIT_AND_EXPR
11548 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11550 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11551 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11552 fold_convert_loc (loc, type, arg0),
11553 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11555 /* Fold X ^ (Y & X) as ~Y & X. */
11556 if (TREE_CODE (arg1) == BIT_AND_EXPR
11557 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11558 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11560 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11561 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11562 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11563 fold_convert_loc (loc, type, arg0));
11566 /* See if this can be simplified into a rotate first. If that
11567 is unsuccessful continue in the association code. */
11568 goto bit_rotate;
11570 case BIT_AND_EXPR:
11571 if (integer_all_onesp (arg1))
11572 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11573 if (integer_zerop (arg1))
11574 return omit_one_operand_loc (loc, type, arg1, arg0);
11575 if (operand_equal_p (arg0, arg1, 0))
11576 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11578 /* ~X & X is always zero. */
11579 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11581 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11583 /* X & ~X is always zero. */
11584 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11585 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11586 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11588 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11589 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11590 && TREE_CODE (arg1) == INTEGER_CST
11591 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11593 tree tmp1 = fold_convert_loc (loc, type, arg1);
11594 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11595 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11596 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11597 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11598 return
11599 fold_convert_loc (loc, type,
11600 fold_build2_loc (loc, BIT_IOR_EXPR,
11601 type, tmp2, tmp3));
11604 /* (X | Y) & Y is (X, Y). */
11605 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11606 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11607 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11608 /* (X | Y) & X is (Y, X). */
11609 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11610 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11611 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11612 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11613 /* X & (X | Y) is (Y, X). */
11614 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11615 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11616 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11617 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11618 /* X & (Y | X) is (Y, X). */
11619 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11620 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11621 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11622 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11624 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11625 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11626 && integer_onep (TREE_OPERAND (arg0, 1))
11627 && integer_onep (arg1))
11629 tem = TREE_OPERAND (arg0, 0);
11630 return fold_build2_loc (loc, EQ_EXPR, type,
11631 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11632 build_int_cst (TREE_TYPE (tem), 1)),
11633 build_int_cst (TREE_TYPE (tem), 0));
11635 /* Fold ~X & 1 as (X & 1) == 0. */
11636 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11637 && integer_onep (arg1))
11639 tem = TREE_OPERAND (arg0, 0);
11640 return fold_build2_loc (loc, EQ_EXPR, type,
11641 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11642 build_int_cst (TREE_TYPE (tem), 1)),
11643 build_int_cst (TREE_TYPE (tem), 0));
11646 /* Fold (X ^ Y) & Y as ~X & Y. */
11647 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11648 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11650 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11651 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11652 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11653 fold_convert_loc (loc, type, arg1));
11655 /* Fold (X ^ Y) & X as ~Y & X. */
11656 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11657 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11658 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11660 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11661 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11662 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11663 fold_convert_loc (loc, type, arg1));
11665 /* Fold X & (X ^ Y) as X & ~Y. */
11666 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11667 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11669 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11670 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11671 fold_convert_loc (loc, type, arg0),
11672 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11674 /* Fold X & (Y ^ X) as ~Y & X. */
11675 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11676 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11677 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11679 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11680 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11681 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11682 fold_convert_loc (loc, type, arg0));
11685 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11686 if (t1 != NULL_TREE)
11687 return t1;
11688 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11689 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11690 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11692 unsigned int prec
11693 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11695 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11696 && (~TREE_INT_CST_LOW (arg1)
11697 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11698 return
11699 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11702 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11704 This results in more efficient code for machines without a NOR
11705 instruction. Combine will canonicalize to the first form
11706 which will allow use of NOR instructions provided by the
11707 backend if they exist. */
11708 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11709 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11711 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11712 build2 (BIT_IOR_EXPR, type,
11713 fold_convert_loc (loc, type,
11714 TREE_OPERAND (arg0, 0)),
11715 fold_convert_loc (loc, type,
11716 TREE_OPERAND (arg1, 0))));
11719 /* If arg0 is derived from the address of an object or function, we may
11720 be able to fold this expression using the object or function's
11721 alignment. */
11722 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11724 unsigned HOST_WIDE_INT modulus, residue;
11725 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11727 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11728 integer_onep (arg1));
11730 /* This works because modulus is a power of 2. If this weren't the
11731 case, we'd have to replace it by its greatest power-of-2
11732 divisor: modulus & -modulus. */
11733 if (low < modulus)
11734 return build_int_cst (type, residue & low);
11737 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1)), and
11738 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1)),
11739 if the new mask might be further optimized. */
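      /* E.g. in (X << 8) & 0xff00 the low 8 bits are already zero, so the
         mask can grow to 0xff00 | 0xff == 0xffff, a 16-bit mode mask that
         the code below can optimize further.  */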
11740 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11741 || TREE_CODE (arg0) == RSHIFT_EXPR)
11742 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11743 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11744 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11745 < TYPE_PRECISION (TREE_TYPE (arg0))
11746 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11747 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11749 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11750 unsigned HOST_WIDE_INT mask
11751 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11752 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11753 tree shift_type = TREE_TYPE (arg0);
11755 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11756 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11757 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11758 && TYPE_PRECISION (TREE_TYPE (arg0))
11759 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11761 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11762 tree arg00 = TREE_OPERAND (arg0, 0);
11763 /* See if more bits can be proven as zero because of
11764 zero extension. */
11765 if (TREE_CODE (arg00) == NOP_EXPR
11766 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11768 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11769 if (TYPE_PRECISION (inner_type)
11770 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11771 && TYPE_PRECISION (inner_type) < prec)
11773 prec = TYPE_PRECISION (inner_type);
11774 /* See if we can shorten the right shift. */
11775 if (shiftc < prec)
11776 shift_type = inner_type;
11779 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11780 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11781 zerobits <<= prec - shiftc;
11782 /* For an arithmetic shift, if the sign bit could be set, zerobits
11783 may actually contain sign bits, so no transformation is
11784 possible unless MASK masks them all away. In that
11785 case the shift needs to be converted into a logical shift. */
11786 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11787 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11789 if ((mask & zerobits) == 0)
11790 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11791 else
11792 zerobits = 0;
11796 /* ((X << 16) & 0xff00) is (X, 0). */
11797 if ((mask & zerobits) == mask)
11798 return omit_one_operand_loc (loc, type,
11799 build_int_cst (type, 0), arg0);
11801 newmask = mask | zerobits;
11802 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11804 unsigned int prec;
11806 /* Only do the transformation if NEWMASK is some integer
11807 mode's mask. */
11808 for (prec = BITS_PER_UNIT;
11809 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11810 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11811 break;
11812 if (prec < HOST_BITS_PER_WIDE_INT
11813 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11815 tree newmaskt;
11817 if (shift_type != TREE_TYPE (arg0))
11819 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11820 fold_convert_loc (loc, shift_type,
11821 TREE_OPERAND (arg0, 0)),
11822 TREE_OPERAND (arg0, 1));
11823 tem = fold_convert_loc (loc, type, tem);
11825 else
11826 tem = op0;
11827 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11828 if (!tree_int_cst_equal (newmaskt, arg1))
11829 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11834 goto associate;
11836 case RDIV_EXPR:
11837 /* Don't touch a floating-point divide by zero unless the mode
11838 of the constant can represent infinity. */
11839 if (TREE_CODE (arg1) == REAL_CST
11840 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11841 && real_zerop (arg1))
11842 return NULL_TREE;
11844 /* Optimize A / A to 1.0 if we don't care about
11845 NaNs or Infinities. Skip the transformation
11846 for non-real operands. */
11847 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11848 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11849 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11850 && operand_equal_p (arg0, arg1, 0))
11852 tree r = build_real (TREE_TYPE (arg0), dconst1);
11854 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11857 /* The complex version of the above A / A optimization. */
11858 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11859 && operand_equal_p (arg0, arg1, 0))
11861 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11862 if (! HONOR_NANS (TYPE_MODE (elem_type))
11863 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11865 tree r = build_real (elem_type, dconst1);
11866 /* omit_two_operands will call fold_convert for us. */
11867 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11871 /* (-A) / (-B) -> A / B */
11872 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11873 return fold_build2_loc (loc, RDIV_EXPR, type,
11874 TREE_OPERAND (arg0, 0),
11875 negate_expr (arg1));
11876 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11877 return fold_build2_loc (loc, RDIV_EXPR, type,
11878 negate_expr (arg0),
11879 TREE_OPERAND (arg1, 0));
11881 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11882 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11883 && real_onep (arg1))
11884 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11886 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11887 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11888 && real_minus_onep (arg1))
11889 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11890 negate_expr (arg0)));
11892 /* If ARG1 is a constant, we can convert this to a multiply by the
11893 reciprocal. This does not have the same rounding properties,
11894 so only do this if -freciprocal-math. We can actually
11895 always safely do it if ARG1 is a power of two, but it's hard to
11896 tell if it is or not in a portable manner. */
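      /* E.g. x / 5.0 becomes x * (1.0 / 5.0), which may round differently,
         while x / 2.0 -> x * 0.5 is always exact.  */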
11897 if (TREE_CODE (arg1) == REAL_CST)
11899 if (flag_reciprocal_math
11900 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11901 arg1, 0)))
11902 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11903 /* Find the reciprocal if optimizing and the result is exact. */
11904 if (optimize)
11906 REAL_VALUE_TYPE r;
11907 r = TREE_REAL_CST (arg1);
11908 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11910 tem = build_real (type, r);
11911 return fold_build2_loc (loc, MULT_EXPR, type,
11912 fold_convert_loc (loc, type, arg0), tem);
11916 /* Convert A/B/C to A/(B*C). */
11917 if (flag_reciprocal_math
11918 && TREE_CODE (arg0) == RDIV_EXPR)
11919 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11920 fold_build2_loc (loc, MULT_EXPR, type,
11921 TREE_OPERAND (arg0, 1), arg1));
11923 /* Convert A/(B/C) to (A/B)*C. */
11924 if (flag_reciprocal_math
11925 && TREE_CODE (arg1) == RDIV_EXPR)
11926 return fold_build2_loc (loc, MULT_EXPR, type,
11927 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11928 TREE_OPERAND (arg1, 0)),
11929 TREE_OPERAND (arg1, 1));
11931 /* Convert C1/(X*C2) into (C1/C2)/X. */
11932 if (flag_reciprocal_math
11933 && TREE_CODE (arg1) == MULT_EXPR
11934 && TREE_CODE (arg0) == REAL_CST
11935 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11937 tree tem = const_binop (RDIV_EXPR, arg0,
11938 TREE_OPERAND (arg1, 1), 0);
11939 if (tem)
11940 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11941 TREE_OPERAND (arg1, 0));
11944 if (flag_unsafe_math_optimizations)
11946 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11947 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11949 /* Optimize sin(x)/cos(x) as tan(x). */
11950 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11951 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11952 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11953 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11954 CALL_EXPR_ARG (arg1, 0), 0))
11956 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11958 if (tanfn != NULL_TREE)
11959 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11962 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11963 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11964 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11965 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11966 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11967 CALL_EXPR_ARG (arg1, 0), 0))
11969 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11971 if (tanfn != NULL_TREE)
11973 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11974 CALL_EXPR_ARG (arg0, 0));
11975 return fold_build2_loc (loc, RDIV_EXPR, type,
11976 build_real (type, dconst1), tmp);
11980 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11981 NaNs or Infinities. */
11982 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11983 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11984 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11986 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11987 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11989 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11990 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11991 && operand_equal_p (arg00, arg01, 0))
11993 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11995 if (cosfn != NULL_TREE)
11996 return build_call_expr_loc (loc, cosfn, 1, arg00);
12000 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12001 NaNs or Infinities. */
12002 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12003 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12004 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12006 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12007 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12009 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12010 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12011 && operand_equal_p (arg00, arg01, 0))
12013 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12015 if (cosfn != NULL_TREE)
12017 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12018 return fold_build2_loc (loc, RDIV_EXPR, type,
12019 build_real (type, dconst1),
12020 tmp);
12025 /* Optimize pow(x,c)/x as pow(x,c-1). */
12026 if (fcode0 == BUILT_IN_POW
12027 || fcode0 == BUILT_IN_POWF
12028 || fcode0 == BUILT_IN_POWL)
12030 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12031 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12032 if (TREE_CODE (arg01) == REAL_CST
12033 && !TREE_OVERFLOW (arg01)
12034 && operand_equal_p (arg1, arg00, 0))
12036 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12037 REAL_VALUE_TYPE c;
12038 tree arg;
12040 c = TREE_REAL_CST (arg01);
12041 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12042 arg = build_real (type, c);
12043 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12047 /* Optimize a/root(b/c) into a*root(c/b). */
12048 if (BUILTIN_ROOT_P (fcode1))
12050 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12052 if (TREE_CODE (rootarg) == RDIV_EXPR)
12054 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12055 tree b = TREE_OPERAND (rootarg, 0);
12056 tree c = TREE_OPERAND (rootarg, 1);
12058 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12060 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12061 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12065 /* Optimize x/expN(y) into x*expN(-y). */
12066 if (BUILTIN_EXPONENT_P (fcode1))
12068 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12069 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12070 arg1 = build_call_expr_loc (loc,
12071 expfn, 1,
12072 fold_convert_loc (loc, type, arg));
12073 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12076 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12077 if (fcode1 == BUILT_IN_POW
12078 || fcode1 == BUILT_IN_POWF
12079 || fcode1 == BUILT_IN_POWL)
12081 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12082 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12083 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12084 tree neg11 = fold_convert_loc (loc, type,
12085 negate_expr (arg11));
12086 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12087 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12090 return NULL_TREE;
12092 case TRUNC_DIV_EXPR:
12093 case FLOOR_DIV_EXPR:
12094 /* Simplify A / (B << N) where A and B are positive and B is
12095 a power of 2, to A >> (N + log2(B)). */
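      /* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2).  */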
12096 strict_overflow_p = false;
12097 if (TREE_CODE (arg1) == LSHIFT_EXPR
12098 && (TYPE_UNSIGNED (type)
12099 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12101 tree sval = TREE_OPERAND (arg1, 0);
12102 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12104 tree sh_cnt = TREE_OPERAND (arg1, 1);
12105 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12107 if (strict_overflow_p)
12108 fold_overflow_warning (("assuming signed overflow does not "
12109 "occur when simplifying A / (B << N)"),
12110 WARN_STRICT_OVERFLOW_MISC);
12112 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12113 sh_cnt, build_int_cst (NULL_TREE, pow2));
12114 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12115 fold_convert_loc (loc, type, arg0), sh_cnt);
12119 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12120 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
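      /* Both operands are non-negative here, so the quotient is
         non-negative and rounding toward zero agrees with rounding
         toward negative infinity.  */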
12121 if (INTEGRAL_TYPE_P (type)
12122 && TYPE_UNSIGNED (type)
12123 && code == FLOOR_DIV_EXPR)
12124 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12126 /* Fall through. */
12128 case ROUND_DIV_EXPR:
12129 case CEIL_DIV_EXPR:
12130 case EXACT_DIV_EXPR:
12131 if (integer_onep (arg1))
12132 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12133 if (integer_zerop (arg1))
12134 return NULL_TREE;
12135 /* X / -1 is -X. */
12136 if (!TYPE_UNSIGNED (type)
12137 && TREE_CODE (arg1) == INTEGER_CST
12138 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12139 && TREE_INT_CST_HIGH (arg1) == -1)
12140 return fold_convert_loc (loc, type, negate_expr (arg0));
12142 /* Convert -A / -B to A / B when the type is signed and overflow is
12143 undefined. */
12144 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12145 && TREE_CODE (arg0) == NEGATE_EXPR
12146 && negate_expr_p (arg1))
12148 if (INTEGRAL_TYPE_P (type))
12149 fold_overflow_warning (("assuming signed overflow does not occur "
12150 "when distributing negation across "
12151 "division"),
12152 WARN_STRICT_OVERFLOW_MISC);
12153 return fold_build2_loc (loc, code, type,
12154 fold_convert_loc (loc, type,
12155 TREE_OPERAND (arg0, 0)),
12156 fold_convert_loc (loc, type,
12157 negate_expr (arg1)));
12159 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12160 && TREE_CODE (arg1) == NEGATE_EXPR
12161 && negate_expr_p (arg0))
12163 if (INTEGRAL_TYPE_P (type))
12164 fold_overflow_warning (("assuming signed overflow does not occur "
12165 "when distributing negation across "
12166 "division"),
12167 WARN_STRICT_OVERFLOW_MISC);
12168 return fold_build2_loc (loc, code, type,
12169 fold_convert_loc (loc, type,
12170 negate_expr (arg0)),
12171 fold_convert_loc (loc, type,
12172 TREE_OPERAND (arg1, 0)));
12175 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12176 operation, EXACT_DIV_EXPR.
12178 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12179 At one time others generated faster code; it's not clear if they do
12180 after the last round of changes to the DIV code in expmed.c. */
12181 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12182 && multiple_of_p (type, arg0, arg1))
12183 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12185 strict_overflow_p = false;
12186 if (TREE_CODE (arg1) == INTEGER_CST
12187 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12188 &strict_overflow_p)))
12190 if (strict_overflow_p)
12191 fold_overflow_warning (("assuming signed overflow does not occur "
12192 "when simplifying division"),
12193 WARN_STRICT_OVERFLOW_MISC);
12194 return fold_convert_loc (loc, type, tem);
12197 return NULL_TREE;
12199 case CEIL_MOD_EXPR:
12200 case FLOOR_MOD_EXPR:
12201 case ROUND_MOD_EXPR:
12202 case TRUNC_MOD_EXPR:
12203 /* X % 1 is always zero, but be sure to preserve any side
12204 effects in X. */
12205 if (integer_onep (arg1))
12206 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12208 /* For X % 0, return X % 0 unchanged so that we can get the
12209 proper warnings and errors. */
12210 if (integer_zerop (arg1))
12211 return NULL_TREE;
12213 /* 0 % X is always zero, but be sure to preserve any side
12214 effects in X. Place this after checking for X == 0. */
12215 if (integer_zerop (arg0))
12216 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12218 /* X % -1 is zero. */
12219 if (!TYPE_UNSIGNED (type)
12220 && TREE_CODE (arg1) == INTEGER_CST
12221 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12222 && TREE_INT_CST_HIGH (arg1) == -1)
12223 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12225 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12226 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12227 strict_overflow_p = false;
12228 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12229 && (TYPE_UNSIGNED (type)
12230 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12232 tree c = arg1;
12233 /* Also optimize A % (C << N) where C is a power of 2,
12234 to A & ((C << N) - 1). */
12235 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12236 c = TREE_OPERAND (arg1, 0);
12238 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12240 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12241 build_int_cst (TREE_TYPE (arg1), 1));
12242 if (strict_overflow_p)
12243 fold_overflow_warning (("assuming signed overflow does not "
12244 "occur when simplifying "
12245 "X % (power of two)"),
12246 WARN_STRICT_OVERFLOW_MISC);
12247 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12248 fold_convert_loc (loc, type, arg0),
12249 fold_convert_loc (loc, type, mask));
12253 /* X % -C is the same as X % C. */
12254 if (code == TRUNC_MOD_EXPR
12255 && !TYPE_UNSIGNED (type)
12256 && TREE_CODE (arg1) == INTEGER_CST
12257 && !TREE_OVERFLOW (arg1)
12258 && TREE_INT_CST_HIGH (arg1) < 0
12259 && !TYPE_OVERFLOW_TRAPS (type)
12260 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12261 && !sign_bit_p (arg1, arg1))
12262 return fold_build2_loc (loc, code, type,
12263 fold_convert_loc (loc, type, arg0),
12264 fold_convert_loc (loc, type,
12265 negate_expr (arg1)));
12267 /* X % -Y is the same as X % Y. */
12268 if (code == TRUNC_MOD_EXPR
12269 && !TYPE_UNSIGNED (type)
12270 && TREE_CODE (arg1) == NEGATE_EXPR
12271 && !TYPE_OVERFLOW_TRAPS (type))
12272 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12273 fold_convert_loc (loc, type,
12274 TREE_OPERAND (arg1, 0)));
12276 if (TREE_CODE (arg1) == INTEGER_CST
12277 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12278 &strict_overflow_p)))
12280 if (strict_overflow_p)
12281 fold_overflow_warning (("assuming signed overflow does not occur "
12282 "when simplifying modulus"),
12283 WARN_STRICT_OVERFLOW_MISC);
12284 return fold_convert_loc (loc, type, tem);
12287 return NULL_TREE;
12289 case LROTATE_EXPR:
12290 case RROTATE_EXPR:
12291 if (integer_all_onesp (arg0))
12292 return omit_one_operand_loc (loc, type, arg0, arg1);
12293 goto shift;
12295 case RSHIFT_EXPR:
12296 /* Optimize -1 >> x for arithmetic right shifts. */
12297 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12298 && tree_expr_nonnegative_p (arg1))
12299 return omit_one_operand_loc (loc, type, arg0, arg1);
12300 /* ... fall through ... */
12302 case LSHIFT_EXPR:
12303 shift:
12304 if (integer_zerop (arg1))
12305 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12306 if (integer_zerop (arg0))
12307 return omit_one_operand_loc (loc, type, arg0, arg1);
12309 /* Since a negative shift count is not well-defined,
12310 don't try to compute it in the compiler. */
12311 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12312 return NULL_TREE;
12314 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
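      /* E.g. (x >> 2) >> 3 becomes x >> 5.  */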
12315 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12316 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12317 && host_integerp (TREE_OPERAND (arg0, 1), false)
12318 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12320 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12321 + TREE_INT_CST_LOW (arg1));
12323 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12324 being well defined. */
12325 if (low >= TYPE_PRECISION (type))
12327 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12328 low = low % TYPE_PRECISION (type);
12329 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12330 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12331 TREE_OPERAND (arg0, 0));
12332 else
12333 low = TYPE_PRECISION (type) - 1;
12336 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12337 build_int_cst (type, low));
12340 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12341 into x & ((unsigned)-1 >> c) for unsigned types. */
12342 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12343 || (TYPE_UNSIGNED (type)
12344 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12345 && host_integerp (arg1, false)
12346 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12347 && host_integerp (TREE_OPERAND (arg0, 1), false)
12348 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12350 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12351 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12352 tree lshift;
12353 tree arg00;
12355 if (low0 == low1)
12357 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12359 lshift = build_int_cst (type, -1);
12360 lshift = int_const_binop (code, lshift, arg1, 0);
12362 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12366 /* Rewrite an LROTATE_EXPR by a constant into an
12367 RROTATE_EXPR by a new constant. */
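      /* E.g. rotating a 32-bit value left by 3 equals rotating it
         right by 29.  */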
12368 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12370 tree tem = build_int_cst (TREE_TYPE (arg1),
12371 TYPE_PRECISION (type));
12372 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12373 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12376 /* If we have a rotate of a bit operation with the rotate count and
12377 the second operand of the bit operation both constant,
12378 permute the two operations. */
12379 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12380 && (TREE_CODE (arg0) == BIT_AND_EXPR
12381 || TREE_CODE (arg0) == BIT_IOR_EXPR
12382 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12384 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12385 fold_build2_loc (loc, code, type,
12386 TREE_OPERAND (arg0, 0), arg1),
12387 fold_build2_loc (loc, code, type,
12388 TREE_OPERAND (arg0, 1), arg1));
12390 /* Two consecutive rotates adding up to the precision of the
12391 type can be ignored. */
12392 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12393 && TREE_CODE (arg0) == RROTATE_EXPR
12394 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12395 && TREE_INT_CST_HIGH (arg1) == 0
12396 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12397 && ((TREE_INT_CST_LOW (arg1)
12398 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12399 == (unsigned int) TYPE_PRECISION (type)))
12400 return TREE_OPERAND (arg0, 0);
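/* For example, with a 32-bit type, (X rrotate 10) rrotate 22 folds
   to X, since rotating by the full precision is the identity.  */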
12402 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12403 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12404 if the latter can be further optimized. */
12405 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12406 && TREE_CODE (arg0) == BIT_AND_EXPR
12407 && TREE_CODE (arg1) == INTEGER_CST
12408 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12410 tree mask = fold_build2_loc (loc, code, type,
12411 fold_convert_loc (loc, type,
12412 TREE_OPERAND (arg0, 1)),
12413 arg1);
12414 tree shift = fold_build2_loc (loc, code, type,
12415 fold_convert_loc (loc, type,
12416 TREE_OPERAND (arg0, 0)),
12417 arg1);
12418 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12419 if (tem)
12420 return tem;
12423 return NULL_TREE;
12425 case MIN_EXPR:
12426 if (operand_equal_p (arg0, arg1, 0))
12427 return omit_one_operand_loc (loc, type, arg0, arg1);
12428 if (INTEGRAL_TYPE_P (type)
12429 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12430 return omit_one_operand_loc (loc, type, arg1, arg0);
12431 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12432 if (tem)
12433 return tem;
12434 goto associate;
12436 case MAX_EXPR:
12437 if (operand_equal_p (arg0, arg1, 0))
12438 return omit_one_operand_loc (loc, type, arg0, arg1);
12439 if (INTEGRAL_TYPE_P (type)
12440 && TYPE_MAX_VALUE (type)
12441 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12442 return omit_one_operand_loc (loc, type, arg1, arg0);
12443 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12444 if (tem)
12445 return tem;
12446 goto associate;
12448 case TRUTH_ANDIF_EXPR:
12449 /* Note that the operands of this must be ints
12450 and their values must be 0 or 1.
12451 ("true" is a fixed value perhaps depending on the language.) */
12452 /* If first arg is constant zero, return it. */
12453 if (integer_zerop (arg0))
12454 return fold_convert_loc (loc, type, arg0);
12455 case TRUTH_AND_EXPR:
12456 /* If either arg is constant true, drop it. */
12457 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12458 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12459 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12460 /* Preserve sequence points. */
12461 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12462 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12463 /* If second arg is constant zero, result is zero, but first arg
12464 must be evaluated. */
12465 if (integer_zerop (arg1))
12466 return omit_one_operand_loc (loc, type, arg1, arg0);
12467 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12468 case will be handled here. */
12469 if (integer_zerop (arg0))
12470 return omit_one_operand_loc (loc, type, arg0, arg1);
12472 /* !X && X is always false. */
12473 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12474 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12475 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12476 /* X && !X is always false. */
12477 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12478 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12479 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12481 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12482 means A >= Y && A != MAX, but in this case we know that
12483 A < X <= MAX. */
12485 if (!TREE_SIDE_EFFECTS (arg0)
12486 && !TREE_SIDE_EFFECTS (arg1))
12488 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12489 if (tem && !operand_equal_p (tem, arg0, 0))
12490 return fold_build2_loc (loc, code, type, tem, arg1);
12492 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12493 if (tem && !operand_equal_p (tem, arg1, 0))
12494 return fold_build2_loc (loc, code, type, arg0, tem);
12497 truth_andor:
12498 /* We only do these simplifications if we are optimizing. */
12499 if (!optimize)
12500 return NULL_TREE;
12502 /* Check for things like (A || B) && (A || C). We can convert this
12503 to A || (B && C). Note that either operator can be any of the four
12504 truth and/or operations and the transformation will still be
12505 valid. Also note that we only care about order for the
12506 ANDIF and ORIF operators. If B contains side effects, this
12507 might change the truth-value of A. */
12508 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12509 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12510 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12511 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12512 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12513 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12515 tree a00 = TREE_OPERAND (arg0, 0);
12516 tree a01 = TREE_OPERAND (arg0, 1);
12517 tree a10 = TREE_OPERAND (arg1, 0);
12518 tree a11 = TREE_OPERAND (arg1, 1);
12519 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12520 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12521 && (code == TRUTH_AND_EXPR
12522 || code == TRUTH_OR_EXPR));
12524 if (operand_equal_p (a00, a10, 0))
12525 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12526 fold_build2_loc (loc, code, type, a01, a11));
12527 else if (commutative && operand_equal_p (a00, a11, 0))
12528 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12529 fold_build2_loc (loc, code, type, a01, a10));
12530 else if (commutative && operand_equal_p (a01, a10, 0))
12531 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12532 fold_build2_loc (loc, code, type, a00, a11));
12534 /* This case is tricky because we must either have commutative
12535 operators or else A10 must not have side-effects. */
12537 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12538 && operand_equal_p (a01, a11, 0))
12539 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12540 fold_build2_loc (loc, code, type, a00, a10),
12541 a01);
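/* For example, (a || b) && (a || c) folds to a || (b && c).  With
   the commutative TRUTH_AND/TRUTH_OR codes the shared operand may
   appear on either side, so (a AND b) OR (c AND a) folds to
   a AND (b OR c).  */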
12544 /* See if we can build a range comparison. */
12545 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12546 return tem;
12548 /* Check for the possibility of merging component references. If our
12549 lhs is another similar operation, try to merge its rhs with our
12550 rhs. Then try to merge our lhs and rhs. */
12551 if (TREE_CODE (arg0) == code
12552 && 0 != (tem = fold_truthop (loc, code, type,
12553 TREE_OPERAND (arg0, 1), arg1)))
12554 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12556 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12557 return tem;
12559 return NULL_TREE;
12561 case TRUTH_ORIF_EXPR:
12562 /* Note that the operands of this must be ints
12563 and their values must be 0 or 1.
12564 ("true" is a fixed value perhaps depending on the language.) */
12565 /* If first arg is constant true, return it. */
12566 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12567 return fold_convert_loc (loc, type, arg0);
12568 case TRUTH_OR_EXPR:
12569 /* If either arg is constant zero, drop it. */
12570 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12571 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12572 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12573 /* Preserve sequence points. */
12574 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12575 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12576 /* If second arg is constant true, result is true, but we must
12577 evaluate first arg. */
12578 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12579 return omit_one_operand_loc (loc, type, arg1, arg0);
12580 /* Likewise for first arg, but note this only occurs here for
12581 TRUTH_OR_EXPR. */
12582 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12583 return omit_one_operand_loc (loc, type, arg0, arg1);
12585 /* !X || X is always true. */
12586 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12587 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12588 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12589 /* X || !X is always true. */
12590 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12591 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12592 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12594 goto truth_andor;
12596 case TRUTH_XOR_EXPR:
12597 /* If the second arg is constant zero, drop it. */
12598 if (integer_zerop (arg1))
12599 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12600 /* If the second arg is constant true, this is a logical inversion. */
12601 if (integer_onep (arg1))
12603 /* Only call invert_truthvalue if operand is a truth value. */
12604 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12605 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12606 else
12607 tem = invert_truthvalue_loc (loc, arg0);
12608 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12610 /* Identical arguments cancel to zero. */
12611 if (operand_equal_p (arg0, arg1, 0))
12612 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12614 /* !X ^ X is always true. */
12615 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12616 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12617 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12619 /* X ^ !X is always true. */
12620 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12621 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12622 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12624 return NULL_TREE;
12626 case EQ_EXPR:
12627 case NE_EXPR:
12628 tem = fold_comparison (loc, code, type, op0, op1);
12629 if (tem != NULL_TREE)
12630 return tem;
12632 /* bool_var != 0 becomes bool_var. */
12633 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12634 && code == NE_EXPR)
12635 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12637 /* bool_var == 1 becomes bool_var. */
12638 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12639 && code == EQ_EXPR)
12640 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12642 /* bool_var != 1 becomes !bool_var. */
12643 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12644 && code == NE_EXPR)
12645 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12646 fold_convert_loc (loc, type, arg0));
12648 /* bool_var == 0 becomes !bool_var. */
12649 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12650 && code == EQ_EXPR)
12651 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12652 fold_convert_loc (loc, type, arg0));
12654 /* !exp != 0 becomes !exp.  */
12655 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12656 && code == NE_EXPR)
12657 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12659 /* If this is an equality comparison of the address of two non-weak,
12660 unaliased symbols neither of which are extern (since we do not
12661 have access to attributes for externs), then we know the result. */
12662 if (TREE_CODE (arg0) == ADDR_EXPR
12663 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12664 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12665 && ! lookup_attribute ("alias",
12666 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12667 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12668 && TREE_CODE (arg1) == ADDR_EXPR
12669 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12670 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12671 && ! lookup_attribute ("alias",
12672 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12673 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12675 /* We know that we're looking at the address of two
12676 non-weak, unaliased, static _DECL nodes.
12678 It is both wasteful and incorrect to call operand_equal_p
12679 to compare the two ADDR_EXPR nodes. It is wasteful in that
12680 all we need to do is test pointer equality for the arguments
12681 to the two ADDR_EXPR nodes. It is incorrect to use
12682 operand_equal_p as that function is NOT equivalent to a
12683 C equality test. It can in fact return false for two
12684 objects which would test as equal using the C equality
12685 operator. */
12686 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12687 return constant_boolean_node (equal
12688 ? code == EQ_EXPR : code != EQ_EXPR,
12689 type);
12692 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12693 a MINUS_EXPR of a constant, we can convert it into a comparison with
12694 a revised constant as long as no overflow occurs. */
12695 if (TREE_CODE (arg1) == INTEGER_CST
12696 && (TREE_CODE (arg0) == PLUS_EXPR
12697 || TREE_CODE (arg0) == MINUS_EXPR)
12698 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12699 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12700 ? MINUS_EXPR : PLUS_EXPR,
12701 fold_convert_loc (loc, TREE_TYPE (arg0),
12702 arg1),
12703 TREE_OPERAND (arg0, 1), 0))
12704 && !TREE_OVERFLOW (tem))
12705 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12707 /* Similarly for a NEGATE_EXPR. */
12708 if (TREE_CODE (arg0) == NEGATE_EXPR
12709 && TREE_CODE (arg1) == INTEGER_CST
12710 && 0 != (tem = negate_expr (arg1))
12711 && TREE_CODE (tem) == INTEGER_CST
12712 && !TREE_OVERFLOW (tem))
12713 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12715 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12716 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12717 && TREE_CODE (arg1) == INTEGER_CST
12718 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12719 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12720 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12721 fold_convert_loc (loc,
12722 TREE_TYPE (arg0),
12723 arg1),
12724 TREE_OPERAND (arg0, 1)));
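/* For example, (X ^ 5) == 3 folds to X == 6, since XOR by a
   constant is invertible and 5 ^ 3 == 6.  */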
12726 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12727 if ((TREE_CODE (arg0) == PLUS_EXPR
12728 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12729 || TREE_CODE (arg0) == MINUS_EXPR)
12730 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12731 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12732 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12734 tree val = TREE_OPERAND (arg0, 1);
12735 return omit_two_operands_loc (loc, type,
12736 fold_build2_loc (loc, code, type,
12737 val,
12738 build_int_cst (TREE_TYPE (val),
12739 0)),
12740 TREE_OPERAND (arg0, 0), arg1);
12743 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12744 if (TREE_CODE (arg0) == MINUS_EXPR
12745 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12746 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12747 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12749 return omit_two_operands_loc (loc, type,
12750 code == NE_EXPR
12751 ? boolean_true_node : boolean_false_node,
12752 TREE_OPERAND (arg0, 1), arg1);
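/* For example, 7 - X == X would require 2*X == 7; twice an integer
   is always even, even under wraparound, so the EQ folds to false
   and the NE to true.  */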
12755 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12756 for !=. Don't do this for ordered comparisons due to overflow. */
12757 if (TREE_CODE (arg0) == MINUS_EXPR
12758 && integer_zerop (arg1))
12759 return fold_build2_loc (loc, code, type,
12760 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12762 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12763 if (TREE_CODE (arg0) == ABS_EXPR
12764 && (integer_zerop (arg1) || real_zerop (arg1)))
12765 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12767 /* If this is an EQ or NE comparison with zero and ARG0 is
12768 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12769 two operations, but the latter can be done in one less insn
12770 on machines that have only two-operand insns or on which a
12771 constant cannot be the first operand. */
12772 if (TREE_CODE (arg0) == BIT_AND_EXPR
12773 && integer_zerop (arg1))
12775 tree arg00 = TREE_OPERAND (arg0, 0);
12776 tree arg01 = TREE_OPERAND (arg0, 1);
12777 if (TREE_CODE (arg00) == LSHIFT_EXPR
12778 && integer_onep (TREE_OPERAND (arg00, 0)))
12780 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12781 arg01, TREE_OPERAND (arg00, 1));
12782 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12783 build_int_cst (TREE_TYPE (arg0), 1));
12784 return fold_build2_loc (loc, code, type,
12785 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12786 arg1);
12788 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12789 && integer_onep (TREE_OPERAND (arg01, 0)))
12791 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12792 arg00, TREE_OPERAND (arg01, 1));
12793 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12794 build_int_cst (TREE_TYPE (arg0), 1));
12795 return fold_build2_loc (loc, code, type,
12796 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12797 arg1);
12801 /* If this is an NE or EQ comparison of zero against the result of a
12802 signed MOD operation whose second operand is a power of 2, make
12803 the MOD operation unsigned since it is simpler and equivalent. */
12804 if (integer_zerop (arg1)
12805 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12806 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12807 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12808 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12809 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12810 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12812 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12813 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12814 fold_convert_loc (loc, newtype,
12815 TREE_OPERAND (arg0, 0)),
12816 fold_convert_loc (loc, newtype,
12817 TREE_OPERAND (arg0, 1)));
12819 return fold_build2_loc (loc, code, type, newmod,
12820 fold_convert_loc (loc, newtype, arg1));
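/* For example, the signed test X % 8 == 0 becomes
   (unsigned) X % 8 == 0: divisibility by a power of 2 depends only
   on the low-order bits, which the conversion leaves unchanged.  */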
12823 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12824 C1 is a valid shift constant, and C2 is a power of two, i.e.
12825 a single bit. */
12826 if (TREE_CODE (arg0) == BIT_AND_EXPR
12827 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12828 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12829 == INTEGER_CST
12830 && integer_pow2p (TREE_OPERAND (arg0, 1))
12831 && integer_zerop (arg1))
12833 tree itype = TREE_TYPE (arg0);
12834 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12835 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12837 /* Check for a valid shift count. */
12838 if (TREE_INT_CST_HIGH (arg001) == 0
12839 && TREE_INT_CST_LOW (arg001) < prec)
12841 tree arg01 = TREE_OPERAND (arg0, 1);
12842 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12843 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12844 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12845 can be rewritten as (X & (C2 << C1)) != 0. */
12846 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12848 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12849 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12850 return fold_build2_loc (loc, code, type, tem, arg1);
12852 /* Otherwise, for signed (arithmetic) shifts,
12853 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12854 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12855 else if (!TYPE_UNSIGNED (itype))
12856 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12857 arg000, build_int_cst (itype, 0));
12858 /* Otherwise, for unsigned (logical) shifts,
12859 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12860 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12861 else
12862 return omit_one_operand_loc (loc, type,
12863 code == EQ_EXPR ? integer_one_node
12864 : integer_zero_node,
12865 arg000);
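/* For example, with a 32-bit type, ((X >> 3) & 4) != 0 becomes
   (X & 32) != 0 because 4 << 3 == 32 does not overflow.  With a
   shift count of 31, ((X >> 31) & 2) != 0 becomes X < 0 for signed
   X (the shifted-in bits are copies of the sign bit) and constant
   false for unsigned X (the shifted-in bits are zero).  */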
12869 /* If this is an NE comparison of zero with an AND of one, remove the
12870 comparison since the AND will give the correct value. */
12871 if (code == NE_EXPR
12872 && integer_zerop (arg1)
12873 && TREE_CODE (arg0) == BIT_AND_EXPR
12874 && integer_onep (TREE_OPERAND (arg0, 1)))
12875 return fold_convert_loc (loc, type, arg0);
12877 /* If we have (A & C) == C where C is a power of 2, convert this into
12878 (A & C) != 0. Similarly for NE_EXPR. */
12879 if (TREE_CODE (arg0) == BIT_AND_EXPR
12880 && integer_pow2p (TREE_OPERAND (arg0, 1))
12881 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12882 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12883 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12884 integer_zero_node));
12886 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12887 bit, then fold the expression into A < 0 or A >= 0. */
12888 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12889 if (tem)
12890 return tem;
12892 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12893 Similarly for NE_EXPR. */
12894 if (TREE_CODE (arg0) == BIT_AND_EXPR
12895 && TREE_CODE (arg1) == INTEGER_CST
12896 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12898 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12899 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12900 TREE_OPERAND (arg0, 1));
12901 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12902 arg1, notc);
12903 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12904 if (integer_nonzerop (dandnotc))
12905 return omit_one_operand_loc (loc, type, rslt, arg0);
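/* For example, (X & 4) == 5 is always false: 5 & ~4 == 1 is
   nonzero, so bit 0 of the constant can never be matched.  */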
12908 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12909 Similarly for NE_EXPR. */
12910 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12911 && TREE_CODE (arg1) == INTEGER_CST
12912 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12914 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12915 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12916 TREE_OPERAND (arg0, 1), notd);
12917 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12918 if (integer_nonzerop (candnotd))
12919 return omit_one_operand_loc (loc, type, rslt, arg0);
12922 /* If this is a comparison of a field, we may be able to simplify it. */
12923 if ((TREE_CODE (arg0) == COMPONENT_REF
12924 || TREE_CODE (arg0) == BIT_FIELD_REF)
12925 /* Handle the constant case even without -O
12926 to make sure the warnings are given. */
12927 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12929 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12930 if (t1)
12931 return t1;
12934 /* Optimize comparisons of strlen vs zero to a compare of the
12935 first character of the string vs zero. To wit,
12936 strlen(ptr) == 0 => *ptr == 0
12937 strlen(ptr) != 0 => *ptr != 0
12938 Other cases should reduce to one of these two (or a constant)
12939 due to the return value of strlen being unsigned. */
12940 if (TREE_CODE (arg0) == CALL_EXPR
12941 && integer_zerop (arg1))
12943 tree fndecl = get_callee_fndecl (arg0);
12945 if (fndecl
12946 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12947 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12948 && call_expr_nargs (arg0) == 1
12949 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12951 tree iref = build_fold_indirect_ref_loc (loc,
12952 CALL_EXPR_ARG (arg0, 0));
12953 return fold_build2_loc (loc, code, type, iref,
12954 build_int_cst (TREE_TYPE (iref), 0));
12958 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12959 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12960 if (TREE_CODE (arg0) == RSHIFT_EXPR
12961 && integer_zerop (arg1)
12962 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12964 tree arg00 = TREE_OPERAND (arg0, 0);
12965 tree arg01 = TREE_OPERAND (arg0, 1);
12966 tree itype = TREE_TYPE (arg00);
12967 if (TREE_INT_CST_HIGH (arg01) == 0
12968 && TREE_INT_CST_LOW (arg01)
12969 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12971 if (TYPE_UNSIGNED (itype))
12973 itype = signed_type_for (itype);
12974 arg00 = fold_convert_loc (loc, itype, arg00);
12976 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12977 type, arg00, build_int_cst (itype, 0));
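/* For example, for signed 32-bit X, X >> 31 is 0 when X >= 0 and
   -1 when X < 0, so (X >> 31) != 0 folds to X < 0 and
   (X >> 31) == 0 folds to X >= 0.  */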
12981 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12982 if (integer_zerop (arg1)
12983 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12984 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12985 TREE_OPERAND (arg0, 1));
12987 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12988 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12989 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12990 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12991 build_int_cst (TREE_TYPE (arg1), 0));
12992 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12993 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12994 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12995 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12996 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12997 build_int_cst (TREE_TYPE (arg1), 0));
12999 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13000 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13001 && TREE_CODE (arg1) == INTEGER_CST
13002 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13003 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13004 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13005 TREE_OPERAND (arg0, 1), arg1));
13007 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13008 (X & C) == 0 when C is a single bit. */
13009 if (TREE_CODE (arg0) == BIT_AND_EXPR
13010 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13011 && integer_zerop (arg1)
13012 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13014 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13015 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13016 TREE_OPERAND (arg0, 1));
13017 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13018 type, tem, arg1);
13021 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13022 constant C is a power of two, i.e. a single bit. */
13023 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13024 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13025 && integer_zerop (arg1)
13026 && integer_pow2p (TREE_OPERAND (arg0, 1))
13027 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13028 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13030 tree arg00 = TREE_OPERAND (arg0, 0);
13031 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13032 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13035 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13036 when C is a power of two, i.e. a single bit. */
13037 if (TREE_CODE (arg0) == BIT_AND_EXPR
13038 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13039 && integer_zerop (arg1)
13040 && integer_pow2p (TREE_OPERAND (arg0, 1))
13041 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13042 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13044 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13045 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13046 arg000, TREE_OPERAND (arg0, 1));
13047 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13048 tem, build_int_cst (TREE_TYPE (tem), 0));
13051 if (integer_zerop (arg1)
13052 && tree_expr_nonzero_p (arg0))
13054 tree res = constant_boolean_node (code==NE_EXPR, type);
13055 return omit_one_operand_loc (loc, type, res, arg0);
13058 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13059 if (TREE_CODE (arg0) == NEGATE_EXPR
13060 && TREE_CODE (arg1) == NEGATE_EXPR)
13061 return fold_build2_loc (loc, code, type,
13062 TREE_OPERAND (arg0, 0),
13063 TREE_OPERAND (arg1, 0));
13065 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13066 if (TREE_CODE (arg0) == BIT_AND_EXPR
13067 && TREE_CODE (arg1) == BIT_AND_EXPR)
13069 tree arg00 = TREE_OPERAND (arg0, 0);
13070 tree arg01 = TREE_OPERAND (arg0, 1);
13071 tree arg10 = TREE_OPERAND (arg1, 0);
13072 tree arg11 = TREE_OPERAND (arg1, 1);
13073 tree itype = TREE_TYPE (arg0);
13075 if (operand_equal_p (arg01, arg11, 0))
13076 return fold_build2_loc (loc, code, type,
13077 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13078 fold_build2_loc (loc,
13079 BIT_XOR_EXPR, itype,
13080 arg00, arg10),
13081 arg01),
13082 build_int_cst (itype, 0));
13084 if (operand_equal_p (arg01, arg10, 0))
13085 return fold_build2_loc (loc, code, type,
13086 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13087 fold_build2_loc (loc,
13088 BIT_XOR_EXPR, itype,
13089 arg00, arg11),
13090 arg01),
13091 build_int_cst (itype, 0));
13093 if (operand_equal_p (arg00, arg11, 0))
13094 return fold_build2_loc (loc, code, type,
13095 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13096 fold_build2_loc (loc,
13097 BIT_XOR_EXPR, itype,
13098 arg01, arg10),
13099 arg00),
13100 build_int_cst (itype, 0));
13102 if (operand_equal_p (arg00, arg10, 0))
13103 return fold_build2_loc (loc, code, type,
13104 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13105 fold_build2_loc (loc,
13106 BIT_XOR_EXPR, itype,
13107 arg01, arg11),
13108 arg00),
13109 build_int_cst (itype, 0));
13112 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13113 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13115 tree arg00 = TREE_OPERAND (arg0, 0);
13116 tree arg01 = TREE_OPERAND (arg0, 1);
13117 tree arg10 = TREE_OPERAND (arg1, 0);
13118 tree arg11 = TREE_OPERAND (arg1, 1);
13119 tree itype = TREE_TYPE (arg0);
13121 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13122 operand_equal_p guarantees no side-effects so we don't need
13123 to use omit_one_operand on Z. */
13124 if (operand_equal_p (arg01, arg11, 0))
13125 return fold_build2_loc (loc, code, type, arg00, arg10);
13126 if (operand_equal_p (arg01, arg10, 0))
13127 return fold_build2_loc (loc, code, type, arg00, arg11);
13128 if (operand_equal_p (arg00, arg11, 0))
13129 return fold_build2_loc (loc, code, type, arg01, arg10);
13130 if (operand_equal_p (arg00, arg10, 0))
13131 return fold_build2_loc (loc, code, type, arg01, arg11);
13133 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13134 if (TREE_CODE (arg01) == INTEGER_CST
13135 && TREE_CODE (arg11) == INTEGER_CST)
13136 return fold_build2_loc (loc, code, type,
13137 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13138 fold_build2_loc (loc,
13139 BIT_XOR_EXPR, itype,
13140 arg01, arg11)),
13141 arg10);
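/* For example, (X ^ 1) == (Y ^ 3) folds to (X ^ 2) == Y, merging
   the two constants as 1 ^ 3 == 2.  */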
13144 /* Attempt to simplify equality/inequality comparisons of complex
13145 values. Only lower the comparison if the result is known or
13146 can be simplified to a single scalar comparison. */
13147 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13148 || TREE_CODE (arg0) == COMPLEX_CST)
13149 && (TREE_CODE (arg1) == COMPLEX_EXPR
13150 || TREE_CODE (arg1) == COMPLEX_CST))
13152 tree real0, imag0, real1, imag1;
13153 tree rcond, icond;
13155 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13157 real0 = TREE_OPERAND (arg0, 0);
13158 imag0 = TREE_OPERAND (arg0, 1);
13160 else
13162 real0 = TREE_REALPART (arg0);
13163 imag0 = TREE_IMAGPART (arg0);
13166 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13168 real1 = TREE_OPERAND (arg1, 0);
13169 imag1 = TREE_OPERAND (arg1, 1);
13171 else
13173 real1 = TREE_REALPART (arg1);
13174 imag1 = TREE_IMAGPART (arg1);
13177 rcond = fold_binary_loc (loc, code, type, real0, real1);
13178 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13180 if (integer_zerop (rcond))
13182 if (code == EQ_EXPR)
13183 return omit_two_operands_loc (loc, type, boolean_false_node,
13184 imag0, imag1);
13185 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13187 else
13189 if (code == NE_EXPR)
13190 return omit_two_operands_loc (loc, type, boolean_true_node,
13191 imag0, imag1);
13192 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13196 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13197 if (icond && TREE_CODE (icond) == INTEGER_CST)
13199 if (integer_zerop (icond))
13201 if (code == EQ_EXPR)
13202 return omit_two_operands_loc (loc, type, boolean_false_node,
13203 real0, real1);
13204 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13206 else
13208 if (code == NE_EXPR)
13209 return omit_two_operands_loc (loc, type, boolean_true_node,
13210 real0, real1);
13211 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13216 return NULL_TREE;
13218 case LT_EXPR:
13219 case GT_EXPR:
13220 case LE_EXPR:
13221 case GE_EXPR:
13222 tem = fold_comparison (loc, code, type, op0, op1);
13223 if (tem != NULL_TREE)
13224 return tem;
13226 /* Transform comparisons of the form X +- C CMP X. */
13227 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13228 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13229 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13230 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13231 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13232 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13234 tree arg01 = TREE_OPERAND (arg0, 1);
13235 enum tree_code code0 = TREE_CODE (arg0);
13236 int is_positive;
13238 if (TREE_CODE (arg01) == REAL_CST)
13239 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13240 else
13241 is_positive = tree_int_cst_sgn (arg01);
13243 /* (X - c) > X becomes false. */
13244 if (code == GT_EXPR
13245 && ((code0 == MINUS_EXPR && is_positive >= 0)
13246 || (code0 == PLUS_EXPR && is_positive <= 0)))
13248 if (TREE_CODE (arg01) == INTEGER_CST
13249 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13250 fold_overflow_warning (("assuming signed overflow does not "
13251 "occur when assuming that (X - c) > X "
13252 "is always false"),
13253 WARN_STRICT_OVERFLOW_ALL);
13254 return constant_boolean_node (0, type);
13257 /* Likewise (X + c) < X becomes false. */
13258 if (code == LT_EXPR
13259 && ((code0 == PLUS_EXPR && is_positive >= 0)
13260 || (code0 == MINUS_EXPR && is_positive <= 0)))
13262 if (TREE_CODE (arg01) == INTEGER_CST
13263 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13264 fold_overflow_warning (("assuming signed overflow does not "
13265 "occur when assuming that "
13266 "(X + c) < X is always false"),
13267 WARN_STRICT_OVERFLOW_ALL);
13268 return constant_boolean_node (0, type);
13271 /* Convert (X - c) <= X to true. */
13272 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13273 && code == LE_EXPR
13274 && ((code0 == MINUS_EXPR && is_positive >= 0)
13275 || (code0 == PLUS_EXPR && is_positive <= 0)))
13277 if (TREE_CODE (arg01) == INTEGER_CST
13278 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13279 fold_overflow_warning (("assuming signed overflow does not "
13280 "occur when assuming that "
13281 "(X - c) <= X is always true"),
13282 WARN_STRICT_OVERFLOW_ALL);
13283 return constant_boolean_node (1, type);
13286 /* Convert (X + c) >= X to true. */
13287 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13288 && code == GE_EXPR
13289 && ((code0 == PLUS_EXPR && is_positive >= 0)
13290 || (code0 == MINUS_EXPR && is_positive <= 0)))
13292 if (TREE_CODE (arg01) == INTEGER_CST
13293 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13294 fold_overflow_warning (("assuming signed overflow does not "
13295 "occur when assuming that "
13296 "(X + c) >= X is always true"),
13297 WARN_STRICT_OVERFLOW_ALL);
13298 return constant_boolean_node (1, type);
13301 if (TREE_CODE (arg01) == INTEGER_CST)
13303 /* Convert X + c > X and X - c < X to true for integers. */
13304 if (code == GT_EXPR
13305 && ((code0 == PLUS_EXPR && is_positive > 0)
13306 || (code0 == MINUS_EXPR && is_positive < 0)))
13308 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13309 fold_overflow_warning (("assuming signed overflow does "
13310 "not occur when assuming that "
13311 "(X + c) > X is always true"),
13312 WARN_STRICT_OVERFLOW_ALL);
13313 return constant_boolean_node (1, type);
13316 if (code == LT_EXPR
13317 && ((code0 == MINUS_EXPR && is_positive > 0)
13318 || (code0 == PLUS_EXPR && is_positive < 0)))
13320 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13321 fold_overflow_warning (("assuming signed overflow does "
13322 "not occur when assuming that "
13323 "(X - c) < X is always true"),
13324 WARN_STRICT_OVERFLOW_ALL);
13325 return constant_boolean_node (1, type);
13328 /* Convert X + c <= X and X - c >= X to false for integers. */
13329 if (code == LE_EXPR
13330 && ((code0 == PLUS_EXPR && is_positive > 0)
13331 || (code0 == MINUS_EXPR && is_positive < 0)))
13333 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13334 fold_overflow_warning (("assuming signed overflow does "
13335 "not occur when assuming that "
13336 "(X + c) <= X is always false"),
13337 WARN_STRICT_OVERFLOW_ALL);
13338 return constant_boolean_node (0, type);
13341 if (code == GE_EXPR
13342 && ((code0 == MINUS_EXPR && is_positive > 0)
13343 || (code0 == PLUS_EXPR && is_positive < 0)))
13345 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13346 fold_overflow_warning (("assuming signed overflow does "
13347 "not occur when assuming that "
13348 "(X - c) >= X is always false"),
13349 WARN_STRICT_OVERFLOW_ALL);
13350 return constant_boolean_node (0, type);
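/* For example, when signed overflow is undefined, X + 1 > X folds
   to true and X - 1 >= X folds to false; fold_overflow_warning
   records the assumption for -Wstrict-overflow.  */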
13355 /* Comparisons with the highest or lowest possible integer of
13356 the specified precision will have known values. */
13358 tree arg1_type = TREE_TYPE (arg1);
13359 unsigned int width = TYPE_PRECISION (arg1_type);
13361 if (TREE_CODE (arg1) == INTEGER_CST
13362 && width <= 2 * HOST_BITS_PER_WIDE_INT
13363 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13365 HOST_WIDE_INT signed_max_hi;
13366 unsigned HOST_WIDE_INT signed_max_lo;
13367 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13369 if (width <= HOST_BITS_PER_WIDE_INT)
13371 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13372 - 1;
13373 signed_max_hi = 0;
13374 max_hi = 0;
13376 if (TYPE_UNSIGNED (arg1_type))
13378 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13379 min_lo = 0;
13380 min_hi = 0;
13382 else
13384 max_lo = signed_max_lo;
13385 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13386 min_hi = -1;
13389 else
13391 width -= HOST_BITS_PER_WIDE_INT;
13392 signed_max_lo = -1;
13393 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13394 - 1;
13395 max_lo = -1;
13396 min_lo = 0;
13398 if (TYPE_UNSIGNED (arg1_type))
13400 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13401 min_hi = 0;
13403 else
13405 max_hi = signed_max_hi;
13406 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13410 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13411 && TREE_INT_CST_LOW (arg1) == max_lo)
13412 switch (code)
13414 case GT_EXPR:
13415 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13417 case GE_EXPR:
13418 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13420 case LE_EXPR:
13421 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13423 case LT_EXPR:
13424 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13426 /* The GE_EXPR and LT_EXPR cases above are not normally
13427 reached because of previous transformations. */
13429 default:
13430 break;
13432 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13433 == max_hi
13434 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13435 switch (code)
13437 case GT_EXPR:
13438 arg1 = const_binop (PLUS_EXPR, arg1,
13439 build_int_cst (TREE_TYPE (arg1), 1), 0);
13440 return fold_build2_loc (loc, EQ_EXPR, type,
13441 fold_convert_loc (loc,
13442 TREE_TYPE (arg1), arg0),
13443 arg1);
13444 case LE_EXPR:
13445 arg1 = const_binop (PLUS_EXPR, arg1,
13446 build_int_cst (TREE_TYPE (arg1), 1), 0);
13447 return fold_build2_loc (loc, NE_EXPR, type,
13448 fold_convert_loc (loc, TREE_TYPE (arg1),
13449 arg0),
13450 arg1);
13451 default:
13452 break;
13454 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13455 == min_hi
13456 && TREE_INT_CST_LOW (arg1) == min_lo)
13457 switch (code)
13459 case LT_EXPR:
13460 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13462 case LE_EXPR:
13463 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13465 case GE_EXPR:
13466 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13468 case GT_EXPR:
13469 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13471 default:
13472 break;
13474 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13475 == min_hi
13476 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13477 switch (code)
13479 case GE_EXPR:
13480 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13481 return fold_build2_loc (loc, NE_EXPR, type,
13482 fold_convert_loc (loc,
13483 TREE_TYPE (arg1), arg0),
13484 arg1);
13485 case LT_EXPR:
13486 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13487 return fold_build2_loc (loc, EQ_EXPR, type,
13488 fold_convert_loc (loc, TREE_TYPE (arg1),
13489 arg0),
13490 arg1);
13491 default:
13492 break;
13495 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13496 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13497 && TYPE_UNSIGNED (arg1_type)
13498 /* We will flip the signedness of the comparison operator
13499 associated with the mode of arg1, so the sign bit is
13500 specified by this mode. Check that arg1 is the signed
13501 max associated with this sign bit. */
13502 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13503 /* signed_type does not work on pointer types. */
13504 && INTEGRAL_TYPE_P (arg1_type))
13506 /* The following case also applies to X < signed_max+1
13507 and X >= signed_max+1 because of previous transformations. */
13508 if (code == LE_EXPR || code == GT_EXPR)
13510 tree st;
13511 st = signed_type_for (TREE_TYPE (arg1));
13512 return fold_build2_loc (loc,
13513 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13514 type, fold_convert_loc (loc, st, arg0),
13515 build_int_cst (st, 0));
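/* For example, for unsigned 32-bit X, X > 0x7fffffff folds to
   (int) X < 0 and X <= 0x7fffffff folds to (int) X >= 0, turning
   the comparison into a sign test.  */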
13521 /* If we are comparing an ABS_EXPR with a constant, we can
13522 convert all the cases into explicit comparisons, but they may
13523 well not be faster than doing the ABS and one comparison.
13524 But ABS (X) <= C is a range comparison, which becomes a subtraction
13525 and a comparison, and is probably faster. */
13526 if (code == LE_EXPR
13527 && TREE_CODE (arg1) == INTEGER_CST
13528 && TREE_CODE (arg0) == ABS_EXPR
13529 && ! TREE_SIDE_EFFECTS (arg0)
13530 && (0 != (tem = negate_expr (arg1)))
13531 && TREE_CODE (tem) == INTEGER_CST
13532 && !TREE_OVERFLOW (tem))
13533 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13534 build2 (GE_EXPR, type,
13535 TREE_OPERAND (arg0, 0), tem),
13536 build2 (LE_EXPR, type,
13537 TREE_OPERAND (arg0, 0), arg1));
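/* For example, ABS (X) <= 5 folds to X >= -5 && X <= 5, a range
   test that later folding can often implement as a single unsigned
   comparison.  */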
13539 /* Convert ABS_EXPR<x> >= 0 to true. */
13540 strict_overflow_p = false;
13541 if (code == GE_EXPR
13542 && (integer_zerop (arg1)
13543 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13544 && real_zerop (arg1)))
13545 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13547 if (strict_overflow_p)
13548 fold_overflow_warning (("assuming signed overflow does not occur "
13549 "when simplifying comparison of "
13550 "absolute value and zero"),
13551 WARN_STRICT_OVERFLOW_CONDITIONAL);
13552 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13555 /* Convert ABS_EXPR<x> < 0 to false. */
13556 strict_overflow_p = false;
13557 if (code == LT_EXPR
13558 && (integer_zerop (arg1) || real_zerop (arg1))
13559 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13561 if (strict_overflow_p)
13562 fold_overflow_warning (("assuming signed overflow does not occur "
13563 "when simplifying comparison of "
13564 "absolute value and zero"),
13565 WARN_STRICT_OVERFLOW_CONDITIONAL);
13566 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13569 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13570 and similarly for >= into !=. */
13571 if ((code == LT_EXPR || code == GE_EXPR)
13572 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13573 && TREE_CODE (arg1) == LSHIFT_EXPR
13574 && integer_onep (TREE_OPERAND (arg1, 0)))
13576 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13577 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13578 TREE_OPERAND (arg1, 1)),
13579 build_int_cst (TREE_TYPE (arg0), 0));
13580 goto fold_binary_exit;
13583 if ((code == LT_EXPR || code == GE_EXPR)
13584 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13585 && CONVERT_EXPR_P (arg1)
13586 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13587 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13589 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13590 fold_convert_loc (loc, TREE_TYPE (arg0),
13591 build2 (RSHIFT_EXPR,
13592 TREE_TYPE (arg0), arg0,
13593 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13594 1))),
13595 build_int_cst (TREE_TYPE (arg0), 0));
13596 goto fold_binary_exit;
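/* For example, for unsigned X, X < (1 << Y) holds exactly when no
   bit at position Y or above is set, i.e. when (X >> Y) == 0;
   likewise X >= (1 << Y) becomes (X >> Y) != 0.  */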
13599 return NULL_TREE;
13601 case UNORDERED_EXPR:
13602 case ORDERED_EXPR:
13603 case UNLT_EXPR:
13604 case UNLE_EXPR:
13605 case UNGT_EXPR:
13606 case UNGE_EXPR:
13607 case UNEQ_EXPR:
13608 case LTGT_EXPR:
13609 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13611 t1 = fold_relational_const (code, type, arg0, arg1);
13612 if (t1 != NULL_TREE)
13613 return t1;
13616 /* If the first operand is NaN, the result is constant. */
13617 if (TREE_CODE (arg0) == REAL_CST
13618 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13619 && (code != LTGT_EXPR || ! flag_trapping_math))
13621 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13622 ? integer_zero_node
13623 : integer_one_node;
13624 return omit_one_operand_loc (loc, type, t1, arg1);
13627 /* If the second operand is NaN, the result is constant. */
13628 if (TREE_CODE (arg1) == REAL_CST
13629 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13630 && (code != LTGT_EXPR || ! flag_trapping_math))
13632 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13633 ? integer_zero_node
13634 : integer_one_node;
13635 return omit_one_operand_loc (loc, type, t1, arg0);
13638 /* Simplify unordered comparison of something with itself. */
13639 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13640 && operand_equal_p (arg0, arg1, 0))
13641 return constant_boolean_node (1, type);
13643 if (code == LTGT_EXPR
13644 && !flag_trapping_math
13645 && operand_equal_p (arg0, arg1, 0))
13646 return constant_boolean_node (0, type);
13648 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13650 tree targ0 = strip_float_extensions (arg0);
13651 tree targ1 = strip_float_extensions (arg1);
13652 tree newtype = TREE_TYPE (targ0);
13654 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13655 newtype = TREE_TYPE (targ1);
13657 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13658 return fold_build2_loc (loc, code, type,
13659 fold_convert_loc (loc, newtype, targ0),
13660 fold_convert_loc (loc, newtype, targ1));
13663 return NULL_TREE;
13665 case COMPOUND_EXPR:
13666 /* When pedantic, a compound expression can be neither an lvalue
13667 nor an integer constant expression. */
13668 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13669 return NULL_TREE;
13670 /* Don't let (0, 0) be a null pointer constant. */
13671 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13672 : fold_convert_loc (loc, type, arg1);
13673 return pedantic_non_lvalue_loc (loc, tem);
13675 case COMPLEX_EXPR:
13676 if ((TREE_CODE (arg0) == REAL_CST
13677 && TREE_CODE (arg1) == REAL_CST)
13678 || (TREE_CODE (arg0) == INTEGER_CST
13679 && TREE_CODE (arg1) == INTEGER_CST))
13680 return build_complex (type, arg0, arg1);
13681 return NULL_TREE;
13683 case ASSERT_EXPR:
13684 /* An ASSERT_EXPR should never be passed to fold_binary. */
13685 gcc_unreachable ();
13687 default:
13688 return NULL_TREE;
13689 } /* switch (code) */
13690 fold_binary_exit:
13691 protected_set_expr_location (tem, loc);
13692 return tem;
13695 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13696 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13697 of GOTO_EXPR. */
13699 static tree
13700 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13702 switch (TREE_CODE (*tp))
13704 case LABEL_EXPR:
13705 return *tp;
13707 case GOTO_EXPR:
13708 *walk_subtrees = 0;
13710 /* ... fall through ... */
13712 default:
13713 return NULL_TREE;
13717 /* Return whether the sub-tree ST contains a label which is accessible from
13718 outside the sub-tree. */
13720 static bool
13721 contains_label_p (tree st)
13723 return
13724 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13727 /* Fold a ternary expression of code CODE and type TYPE with operands
13728 OP0, OP1, and OP2. Return the folded expression if folding is
13729 successful. Otherwise, return NULL_TREE. */
13731 tree
13732 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13733 tree op0, tree op1, tree op2)
13735 tree tem;
13736 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13737 enum tree_code_class kind = TREE_CODE_CLASS (code);
13739 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13740 && TREE_CODE_LENGTH (code) == 3);
13742 /* Strip any conversions that don't change the mode. This is safe
13743 for every expression, except for a comparison expression because
13744 its signedness is derived from its operands. So, in the latter
13745 case, only strip conversions that don't change the signedness.
13747 Note that this is done as an internal manipulation within the
13748 constant folder, in order to find the simplest representation of
13749 the arguments so that their form can be studied. In any case,
13750 the appropriate type conversions should be put back in the tree
13751 that will get out of the constant folder. */
13752 if (op0)
13754 arg0 = op0;
13755 STRIP_NOPS (arg0);
13758 if (op1)
13760 arg1 = op1;
13761 STRIP_NOPS (arg1);
13764 switch (code)
13766 case COMPONENT_REF:
13767 if (TREE_CODE (arg0) == CONSTRUCTOR
13768 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13770 unsigned HOST_WIDE_INT idx;
13771 tree field, value;
13772 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13773 if (field == arg1)
13774 return value;
13776 return NULL_TREE;
13778 case COND_EXPR:
13779 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13780 so all simple results must be passed through pedantic_non_lvalue. */
13781 if (TREE_CODE (arg0) == INTEGER_CST)
13783 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13784 tem = integer_zerop (arg0) ? op2 : op1;
13785 /* Only optimize constant conditions when the selected branch
13786 has the same type as the COND_EXPR. This avoids optimizing
13787 away "c ? x : throw", where the throw has a void type.
13788 Also avoid throwing away an operand that contains a label. */
13789 if ((!TREE_SIDE_EFFECTS (unused_op)
13790 || !contains_label_p (unused_op))
13791 && (! VOID_TYPE_P (TREE_TYPE (tem))
13792 || VOID_TYPE_P (type)))
13793 return pedantic_non_lvalue_loc (loc, tem);
13794 return NULL_TREE;
13796 if (operand_equal_p (arg1, op2, 0))
13797 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13799 /* If we have A op B ? A : C, we may be able to convert this to a
13800 simpler expression, depending on the operation and the values
13801 of B and C. Signed zeros prevent all of these transformations,
13802 for reasons given above each one.
13804 Also try swapping the arguments and inverting the conditional. */
13805 if (COMPARISON_CLASS_P (arg0)
13806 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13807 arg1, TREE_OPERAND (arg0, 1))
13808 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13810 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13811 if (tem)
13812 return tem;
13815 if (COMPARISON_CLASS_P (arg0)
13816 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13817 op2,
13818 TREE_OPERAND (arg0, 1))
13819 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13821 tem = fold_truth_not_expr (loc, arg0);
13822 if (tem && COMPARISON_CLASS_P (tem))
13824 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13825 if (tem)
13826 return tem;
13830 /* If the second operand is simpler than the third, swap them
13831 since that produces better jump optimization results. */
13832 if (truth_value_p (TREE_CODE (arg0))
13833 && tree_swap_operands_p (op1, op2, false))
13835 /* See if this can be inverted. If it can't, possibly because
13836 it was a floating-point inequality comparison, don't do
13837 anything. */
13838 tem = fold_truth_not_expr (loc, arg0);
13839 if (tem)
13840 return fold_build3_loc (loc, code, type, tem, op2, op1);
13843 /* Convert A ? 1 : 0 to simply A. */
13844 if (integer_onep (op1)
13845 && integer_zerop (op2)
13846 /* If we try to convert OP0 to our type, the
13847 call to fold will try to move the conversion inside
13848 a COND, which will recurse. In that case, the COND_EXPR
13849 is probably the best choice, so leave it alone. */
13850 && type == TREE_TYPE (arg0))
13851 return pedantic_non_lvalue_loc (loc, arg0);
13853 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13854 over COND_EXPR in cases such as floating point comparisons. */
13855 if (integer_zerop (op1)
13856 && integer_onep (op2)
13857 && truth_value_p (TREE_CODE (arg0)))
13858 return pedantic_non_lvalue_loc (loc,
13859 fold_convert_loc (loc, type,
13860 invert_truthvalue_loc (loc,
13861 arg0)));
13863 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13864 if (TREE_CODE (arg0) == LT_EXPR
13865 && integer_zerop (TREE_OPERAND (arg0, 1))
13866 && integer_zerop (op2)
13867 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13869 /* sign_bit_p only checks ARG1 bits within A's precision.
13870 If <sign bit of A> has wider type than A, bits outside
13871 of A's precision in <sign bit of A> need to be checked.
13872 If they are all 0, this optimization needs to be done
13873 in unsigned A's type, if they are all 1 in signed A's type,
13874 otherwise this can't be done. */
13875 if (TYPE_PRECISION (TREE_TYPE (tem))
13876 < TYPE_PRECISION (TREE_TYPE (arg1))
13877 && TYPE_PRECISION (TREE_TYPE (tem))
13878 < TYPE_PRECISION (type))
13880 unsigned HOST_WIDE_INT mask_lo;
13881 HOST_WIDE_INT mask_hi;
13882 int inner_width, outer_width;
13883 tree tem_type;
13885 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13886 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13887 if (outer_width > TYPE_PRECISION (type))
13888 outer_width = TYPE_PRECISION (type);
13890 if (outer_width > HOST_BITS_PER_WIDE_INT)
13892 mask_hi = ((unsigned HOST_WIDE_INT) -1
13893 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13894 mask_lo = -1;
13896 else
13898 mask_hi = 0;
13899 mask_lo = ((unsigned HOST_WIDE_INT) -1
13900 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13902 if (inner_width > HOST_BITS_PER_WIDE_INT)
13904 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13905 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13906 mask_lo = 0;
13908 else
13909 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13910 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13912 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13913 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13915 tem_type = signed_type_for (TREE_TYPE (tem));
13916 tem = fold_convert_loc (loc, tem_type, tem);
13918 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13919 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13921 tem_type = unsigned_type_for (TREE_TYPE (tem));
13922 tem = fold_convert_loc (loc, tem_type, tem);
13924 else
13925 tem = NULL;
13928 if (tem)
13929 return
13930 fold_convert_loc (loc, type,
13931 fold_build2_loc (loc, BIT_AND_EXPR,
13932 TREE_TYPE (tem), tem,
13933 fold_convert_loc (loc,
13934 TREE_TYPE (tem),
13935 arg1)));
13938 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13939 already handled above. */
13940 if (TREE_CODE (arg0) == BIT_AND_EXPR
13941 && integer_onep (TREE_OPERAND (arg0, 1))
13942 && integer_zerop (op2)
13943 && integer_pow2p (arg1))
13945 tree tem = TREE_OPERAND (arg0, 0);
13946 STRIP_NOPS (tem);
13947 if (TREE_CODE (tem) == RSHIFT_EXPR
13948 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13949 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13950 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13951 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13952 TREE_OPERAND (tem, 0), arg1);
13955 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13956 is probably obsolete because the first operand should be a
13957 truth value (that's why we have the two cases above), but let's
13958 leave it in until we can confirm this for all front-ends. */
13959 if (integer_zerop (op2)
13960 && TREE_CODE (arg0) == NE_EXPR
13961 && integer_zerop (TREE_OPERAND (arg0, 1))
13962 && integer_pow2p (arg1)
13963 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13964 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13965 arg1, OEP_ONLY_CONST))
13966 return pedantic_non_lvalue_loc (loc,
13967 fold_convert_loc (loc, type,
13968 TREE_OPERAND (arg0, 0)));
13970 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13971 if (integer_zerop (op2)
13972 && truth_value_p (TREE_CODE (arg0))
13973 && truth_value_p (TREE_CODE (arg1)))
13974 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13975 fold_convert_loc (loc, type, arg0),
13976 arg1);
13978 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13979 if (integer_onep (op2)
13980 && truth_value_p (TREE_CODE (arg0))
13981 && truth_value_p (TREE_CODE (arg1)))
13983 /* Only perform transformation if ARG0 is easily inverted. */
13984 tem = fold_truth_not_expr (loc, arg0);
13985 if (tem)
13986 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13987 fold_convert_loc (loc, type, tem),
13988 arg1);
13991 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13992 if (integer_zerop (arg1)
13993 && truth_value_p (TREE_CODE (arg0))
13994 && truth_value_p (TREE_CODE (op2)))
13996 /* Only perform transformation if ARG0 is easily inverted. */
13997 tem = fold_truth_not_expr (loc, arg0);
13998 if (tem)
13999 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14000 fold_convert_loc (loc, type, tem),
14001 op2);
14004 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14005 if (integer_onep (arg1)
14006 && truth_value_p (TREE_CODE (arg0))
14007 && truth_value_p (TREE_CODE (op2)))
14008 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14009 fold_convert_loc (loc, type, arg0),
14010 op2);
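/* Worked instances of the four truth-value conversions above,
for truth-valued a and b:
a ? b : 0   -->   a && b
a ? b : 1   -->   !a || b
a ? 0 : b   -->   !a && b
a ? 1 : b   -->   a || b
The middle two are performed only when fold_truth_not_expr
manages to invert a cleanly.  */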
14012 return NULL_TREE;
14014 case CALL_EXPR:
14015 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14016 of fold_ternary on them. */
14017 gcc_unreachable ();
14019 case BIT_FIELD_REF:
14020 if ((TREE_CODE (arg0) == VECTOR_CST
14021 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
14022 && type == TREE_TYPE (TREE_TYPE (arg0)))
14024 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
14025 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14027 if (width != 0
14028 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
14029 && (idx % width) == 0
14030 && (idx = idx / width)
14031 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14033 tree elements = NULL_TREE;
14035 if (TREE_CODE (arg0) == VECTOR_CST)
14036 elements = TREE_VECTOR_CST_ELTS (arg0);
14037 else
14039 unsigned HOST_WIDE_INT idx;
14040 tree value;
14042 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
14043 elements = tree_cons (NULL_TREE, value, elements);
14045 while (idx-- > 0 && elements)
14046 elements = TREE_CHAIN (elements);
14047 if (elements)
14048 return TREE_VALUE (elements);
14049 else
14050 return fold_convert_loc (loc, type, integer_zero_node);
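/* For example, a BIT_FIELD_REF extracting bits [64, 96) from the
constant vector {1, 2, 3, 4} with 32-bit elements has idx / width
== 2; the walk above stops at the third element and returns 3.  */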
14054 /* A bit-field-ref that referenced the full argument can be stripped. */
14055 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14056 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14057 && integer_zerop (op2))
14058 return fold_convert_loc (loc, type, arg0);
14060 return NULL_TREE;
14062 default:
14063 return NULL_TREE;
14064 } /* switch (code) */
14067 /* Perform constant folding and related simplification of EXPR.
14068 The related simplifications include x*1 => x, x*0 => 0, etc.,
14069 and application of the associative law.
14070 NOP_EXPR conversions may be removed freely (as long as we
14071 are careful not to change the type of the overall expression).
14072 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14073 but we can constant-fold them if they have constant operands. */
14075 #ifdef ENABLE_FOLD_CHECKING
14076 # define fold(x) fold_1 (x)
14077 static tree fold_1 (tree);
14078 static
14079 #endif
14080 tree
14081 fold (tree expr)
14083 const tree t = expr;
14084 enum tree_code code = TREE_CODE (t);
14085 enum tree_code_class kind = TREE_CODE_CLASS (code);
14086 tree tem;
14087 location_t loc = EXPR_LOCATION (expr);
14089 /* Return right away if a constant. */
14090 if (kind == tcc_constant)
14091 return t;
14093 /* CALL_EXPR-like objects with variable numbers of operands are
14094 treated specially. */
14095 if (kind == tcc_vl_exp)
14097 if (code == CALL_EXPR)
14099 tem = fold_call_expr (loc, expr, false);
14100 return tem ? tem : expr;
14102 return expr;
14105 if (IS_EXPR_CODE_CLASS (kind))
14107 tree type = TREE_TYPE (t);
14108 tree op0, op1, op2;
14110 switch (TREE_CODE_LENGTH (code))
14112 case 1:
14113 op0 = TREE_OPERAND (t, 0);
14114 tem = fold_unary_loc (loc, code, type, op0);
14115 return tem ? tem : expr;
14116 case 2:
14117 op0 = TREE_OPERAND (t, 0);
14118 op1 = TREE_OPERAND (t, 1);
14119 tem = fold_binary_loc (loc, code, type, op0, op1);
14120 return tem ? tem : expr;
14121 case 3:
14122 op0 = TREE_OPERAND (t, 0);
14123 op1 = TREE_OPERAND (t, 1);
14124 op2 = TREE_OPERAND (t, 2);
14125 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14126 return tem ? tem : expr;
14127 default:
14128 break;
14132 switch (code)
14134 case ARRAY_REF:
14136 tree op0 = TREE_OPERAND (t, 0);
14137 tree op1 = TREE_OPERAND (t, 1);
14139 if (TREE_CODE (op1) == INTEGER_CST
14140 && TREE_CODE (op0) == CONSTRUCTOR
14141 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14143 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14144 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14145 unsigned HOST_WIDE_INT begin = 0;
14147 /* Find a matching index by means of a binary search. */
14148 while (begin != end)
14150 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14151 tree index = VEC_index (constructor_elt, elts, middle)->index;
14153 if (TREE_CODE (index) == INTEGER_CST
14154 && tree_int_cst_lt (index, op1))
14155 begin = middle + 1;
14156 else if (TREE_CODE (index) == INTEGER_CST
14157 && tree_int_cst_lt (op1, index))
14158 end = middle;
14159 else if (TREE_CODE (index) == RANGE_EXPR
14160 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14161 begin = middle + 1;
14162 else if (TREE_CODE (index) == RANGE_EXPR
14163 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14164 end = middle;
14165 else
14166 return VEC_index (constructor_elt, elts, middle)->value;
14170 return t;
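/* For example, folding A[5] where A's CONSTRUCTOR lists the
index/value pairs {0: w, 2: x, 5: y, 9: z}: the binary search
probes the middle entry, whose index is 5, and returns y.  */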
14173 case CONST_DECL:
14174 return fold (DECL_INITIAL (t));
14176 default:
14177 return t;
14178 } /* switch (code) */
14181 #ifdef ENABLE_FOLD_CHECKING
14182 #undef fold
14184 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14185 static void fold_check_failed (const_tree, const_tree);
14186 void print_fold_checksum (const_tree);
14188 /* When --enable-checking=fold, compute a digest of expr before
14189 and after the actual fold call to verify that fold did not
14190 accidentally change the original expr. */
14192 tree
14193 fold (tree expr)
14195 tree ret;
14196 struct md5_ctx ctx;
14197 unsigned char checksum_before[16], checksum_after[16];
14198 htab_t ht;
14200 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14201 md5_init_ctx (&ctx);
14202 fold_checksum_tree (expr, &ctx, ht);
14203 md5_finish_ctx (&ctx, checksum_before);
14204 htab_empty (ht);
14206 ret = fold_1 (expr);
14208 md5_init_ctx (&ctx);
14209 fold_checksum_tree (expr, &ctx, ht);
14210 md5_finish_ctx (&ctx, checksum_after);
14211 htab_delete (ht);
14213 if (memcmp (checksum_before, checksum_after, 16))
14214 fold_check_failed (expr, ret);
14216 return ret;
14219 void
14220 print_fold_checksum (const_tree expr)
14222 struct md5_ctx ctx;
14223 unsigned char checksum[16], cnt;
14224 htab_t ht;
14226 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14227 md5_init_ctx (&ctx);
14228 fold_checksum_tree (expr, &ctx, ht);
14229 md5_finish_ctx (&ctx, checksum);
14230 htab_delete (ht);
14231 for (cnt = 0; cnt < 16; ++cnt)
14232 fprintf (stderr, "%02x", checksum[cnt]);
14233 putc ('\n', stderr);
14236 static void
14237 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14239 internal_error ("fold check: original tree changed by fold");
14242 static void
14243 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14245 const void **slot;
14246 enum tree_code code;
14247 union tree_node buf;
14248 int i, len;
14250 recursive_label:
14252 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14253 <= sizeof (struct tree_function_decl))
14254 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14255 if (expr == NULL)
14256 return;
14257 slot = (const void **) htab_find_slot (ht, expr, INSERT);
14258 if (*slot != NULL)
14259 return;
14260 *slot = expr;
14261 code = TREE_CODE (expr);
14262 if (TREE_CODE_CLASS (code) == tcc_declaration
14263 && DECL_ASSEMBLER_NAME_SET_P (expr))
14265 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14266 memcpy ((char *) &buf, expr, tree_size (expr));
14267 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14268 expr = (tree) &buf;
14270 else if (TREE_CODE_CLASS (code) == tcc_type
14271 && (TYPE_POINTER_TO (expr)
14272 || TYPE_REFERENCE_TO (expr)
14273 || TYPE_CACHED_VALUES_P (expr)
14274 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14275 || TYPE_NEXT_VARIANT (expr)))
14277 /* Allow these fields to be modified. */
14278 tree tmp;
14279 memcpy ((char *) &buf, expr, tree_size (expr));
14280 expr = tmp = (tree) &buf;
14281 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14282 TYPE_POINTER_TO (tmp) = NULL;
14283 TYPE_REFERENCE_TO (tmp) = NULL;
14284 TYPE_NEXT_VARIANT (tmp) = NULL;
14285 if (TYPE_CACHED_VALUES_P (tmp))
14287 TYPE_CACHED_VALUES_P (tmp) = 0;
14288 TYPE_CACHED_VALUES (tmp) = NULL;
14291 md5_process_bytes (expr, tree_size (expr), ctx);
14292 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14293 if (TREE_CODE_CLASS (code) != tcc_type
14294 && TREE_CODE_CLASS (code) != tcc_declaration
14295 && code != TREE_LIST
14296 && code != SSA_NAME)
14297 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14298 switch (TREE_CODE_CLASS (code))
14300 case tcc_constant:
14301 switch (code)
14303 case STRING_CST:
14304 md5_process_bytes (TREE_STRING_POINTER (expr),
14305 TREE_STRING_LENGTH (expr), ctx);
14306 break;
14307 case COMPLEX_CST:
14308 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14309 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14310 break;
14311 case VECTOR_CST:
14312 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14313 break;
14314 default:
14315 break;
14317 break;
14318 case tcc_exceptional:
14319 switch (code)
14321 case TREE_LIST:
14322 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14323 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14324 expr = TREE_CHAIN (expr);
14325 goto recursive_label;
14326 break;
14327 case TREE_VEC:
14328 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14329 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14330 break;
14331 default:
14332 break;
14334 break;
14335 case tcc_expression:
14336 case tcc_reference:
14337 case tcc_comparison:
14338 case tcc_unary:
14339 case tcc_binary:
14340 case tcc_statement:
14341 case tcc_vl_exp:
14342 len = TREE_OPERAND_LENGTH (expr);
14343 for (i = 0; i < len; ++i)
14344 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14345 break;
14346 case tcc_declaration:
14347 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14348 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14349 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14351 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14352 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14353 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14354 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14355 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14357 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14358 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14360 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14362 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14363 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14364 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14366 break;
14367 case tcc_type:
14368 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14369 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14370 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14371 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14372 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14373 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14374 if (INTEGRAL_TYPE_P (expr)
14375 || SCALAR_FLOAT_TYPE_P (expr))
14377 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14378 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14380 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14381 if (TREE_CODE (expr) == RECORD_TYPE
14382 || TREE_CODE (expr) == UNION_TYPE
14383 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14384 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14385 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14386 break;
14387 default:
14388 break;
14392 /* Helper function for outputting the checksum of a tree T. When
14393 debugging with gdb, you can "define mynext" to be "next" followed
14394 by "call debug_fold_checksum (op0)", then just trace down till the
14395 outputs differ. */
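/* A hypothetical gdb session following the recipe above:
(gdb) define mynext
> next
> call debug_fold_checksum (op0)
> end
(gdb) mynext
Each "mynext" step prints one checksum; the first step whose
output differs from the previous one is where the tree was
modified.  */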
14397 void
14398 debug_fold_checksum (const_tree t)
14400 int i;
14401 unsigned char checksum[16];
14402 struct md5_ctx ctx;
14403 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14405 md5_init_ctx (&ctx);
14406 fold_checksum_tree (t, &ctx, ht);
14407 md5_finish_ctx (&ctx, checksum);
14408 htab_empty (ht);
14410 for (i = 0; i < 16; i++)
14411 fprintf (stderr, "%d ", checksum[i]);
14413 fprintf (stderr, "\n");
14416 #endif
14418 /* Fold a unary tree expression with code CODE of type TYPE with an
14419 operand OP0. LOC is the location of the resulting expression.
14420 Return a folded expression if successful. Otherwise, return a tree
14421 expression with code CODE of type TYPE with an operand OP0. */
14423 tree
14424 fold_build1_stat_loc (location_t loc,
14425 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14427 tree tem;
14428 #ifdef ENABLE_FOLD_CHECKING
14429 unsigned char checksum_before[16], checksum_after[16];
14430 struct md5_ctx ctx;
14431 htab_t ht;
14433 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14434 md5_init_ctx (&ctx);
14435 fold_checksum_tree (op0, &ctx, ht);
14436 md5_finish_ctx (&ctx, checksum_before);
14437 htab_empty (ht);
14438 #endif
14440 tem = fold_unary_loc (loc, code, type, op0);
14441 if (!tem)
14443 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14444 SET_EXPR_LOCATION (tem, loc);
14447 #ifdef ENABLE_FOLD_CHECKING
14448 md5_init_ctx (&ctx);
14449 fold_checksum_tree (op0, &ctx, ht);
14450 md5_finish_ctx (&ctx, checksum_after);
14451 htab_delete (ht);
14453 if (memcmp (checksum_before, checksum_after, 16))
14454 fold_check_failed (op0, tem);
14455 #endif
14456 return tem;
14459 /* Fold a binary tree expression with code CODE of type TYPE with
14460 operands OP0 and OP1. LOC is the location of the resulting
14461 expression. Return a folded expression if successful. Otherwise,
14462 return a tree expression with code CODE of type TYPE with operands
14463 OP0 and OP1. */
14465 tree
14466 fold_build2_stat_loc (location_t loc,
14467 enum tree_code code, tree type, tree op0, tree op1
14468 MEM_STAT_DECL)
14470 tree tem;
14471 #ifdef ENABLE_FOLD_CHECKING
14472 unsigned char checksum_before_op0[16],
14473 checksum_before_op1[16],
14474 checksum_after_op0[16],
14475 checksum_after_op1[16];
14476 struct md5_ctx ctx;
14477 htab_t ht;
14479 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14480 md5_init_ctx (&ctx);
14481 fold_checksum_tree (op0, &ctx, ht);
14482 md5_finish_ctx (&ctx, checksum_before_op0);
14483 htab_empty (ht);
14485 md5_init_ctx (&ctx);
14486 fold_checksum_tree (op1, &ctx, ht);
14487 md5_finish_ctx (&ctx, checksum_before_op1);
14488 htab_empty (ht);
14489 #endif
14491 tem = fold_binary_loc (loc, code, type, op0, op1);
14492 if (!tem)
14494 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14495 SET_EXPR_LOCATION (tem, loc);
14498 #ifdef ENABLE_FOLD_CHECKING
14499 md5_init_ctx (&ctx);
14500 fold_checksum_tree (op0, &ctx, ht);
14501 md5_finish_ctx (&ctx, checksum_after_op0);
14502 htab_empty (ht);
14504 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14505 fold_check_failed (op0, tem);
14507 md5_init_ctx (&ctx);
14508 fold_checksum_tree (op1, &ctx, ht);
14509 md5_finish_ctx (&ctx, checksum_after_op1);
14510 htab_delete (ht);
14512 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14513 fold_check_failed (op1, tem);
14514 #endif
14515 return tem;
14518 /* Fold a ternary tree expression with code CODE of type TYPE with
14519 operands OP0, OP1, and OP2. Return a folded expression if
14520 successful. Otherwise, return a tree expression with code CODE of
14521 type TYPE with operands OP0, OP1, and OP2. */
14523 tree
14524 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14525 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14527 tree tem;
14528 #ifdef ENABLE_FOLD_CHECKING
14529 unsigned char checksum_before_op0[16],
14530 checksum_before_op1[16],
14531 checksum_before_op2[16],
14532 checksum_after_op0[16],
14533 checksum_after_op1[16],
14534 checksum_after_op2[16];
14535 struct md5_ctx ctx;
14536 htab_t ht;
14538 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14539 md5_init_ctx (&ctx);
14540 fold_checksum_tree (op0, &ctx, ht);
14541 md5_finish_ctx (&ctx, checksum_before_op0);
14542 htab_empty (ht);
14544 md5_init_ctx (&ctx);
14545 fold_checksum_tree (op1, &ctx, ht);
14546 md5_finish_ctx (&ctx, checksum_before_op1);
14547 htab_empty (ht);
14549 md5_init_ctx (&ctx);
14550 fold_checksum_tree (op2, &ctx, ht);
14551 md5_finish_ctx (&ctx, checksum_before_op2);
14552 htab_empty (ht);
14553 #endif
14555 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14556 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14557 if (!tem)
14559 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14560 SET_EXPR_LOCATION (tem, loc);
14563 #ifdef ENABLE_FOLD_CHECKING
14564 md5_init_ctx (&ctx);
14565 fold_checksum_tree (op0, &ctx, ht);
14566 md5_finish_ctx (&ctx, checksum_after_op0);
14567 htab_empty (ht);
14569 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14570 fold_check_failed (op0, tem);
14572 md5_init_ctx (&ctx);
14573 fold_checksum_tree (op1, &ctx, ht);
14574 md5_finish_ctx (&ctx, checksum_after_op1);
14575 htab_empty (ht);
14577 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14578 fold_check_failed (op1, tem);
14580 md5_init_ctx (&ctx);
14581 fold_checksum_tree (op2, &ctx, ht);
14582 md5_finish_ctx (&ctx, checksum_after_op2);
14583 htab_delete (ht);
14585 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14586 fold_check_failed (op2, tem);
14587 #endif
14588 return tem;
14591 /* Fold a CALL_EXPR expression of type TYPE that calls function FN
14592 with the NARGS arguments in ARGARRAY, and a null static chain.
14593 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14594 of type TYPE from the given operands as constructed by build_call_array. */
14596 tree
14597 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14598 int nargs, tree *argarray)
14600 tree tem;
14601 #ifdef ENABLE_FOLD_CHECKING
14602 unsigned char checksum_before_fn[16],
14603 checksum_before_arglist[16],
14604 checksum_after_fn[16],
14605 checksum_after_arglist[16];
14606 struct md5_ctx ctx;
14607 htab_t ht;
14608 int i;
14610 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14611 md5_init_ctx (&ctx);
14612 fold_checksum_tree (fn, &ctx, ht);
14613 md5_finish_ctx (&ctx, checksum_before_fn);
14614 htab_empty (ht);
14616 md5_init_ctx (&ctx);
14617 for (i = 0; i < nargs; i++)
14618 fold_checksum_tree (argarray[i], &ctx, ht);
14619 md5_finish_ctx (&ctx, checksum_before_arglist);
14620 htab_empty (ht);
14621 #endif
14623 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14625 #ifdef ENABLE_FOLD_CHECKING
14626 md5_init_ctx (&ctx);
14627 fold_checksum_tree (fn, &ctx, ht);
14628 md5_finish_ctx (&ctx, checksum_after_fn);
14629 htab_empty (ht);
14631 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14632 fold_check_failed (fn, tem);
14634 md5_init_ctx (&ctx);
14635 for (i = 0; i < nargs; i++)
14636 fold_checksum_tree (argarray[i], &ctx, ht);
14637 md5_finish_ctx (&ctx, checksum_after_arglist);
14638 htab_delete (ht);
14640 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14641 fold_check_failed (NULL_TREE, tem);
14642 #endif
14643 return tem;
14646 /* Perform constant folding and related simplification of initializer
14647 expression EXPR. These functions behave identically to "fold_buildN" but ignore
14648 potential run-time traps and exceptions that fold must preserve. */
14650 #define START_FOLD_INIT \
14651 int saved_signaling_nans = flag_signaling_nans;\
14652 int saved_trapping_math = flag_trapping_math;\
14653 int saved_rounding_math = flag_rounding_math;\
14654 int saved_trapv = flag_trapv;\
14655 int saved_folding_initializer = folding_initializer;\
14656 flag_signaling_nans = 0;\
14657 flag_trapping_math = 0;\
14658 flag_rounding_math = 0;\
14659 flag_trapv = 0;\
14660 folding_initializer = 1;
14662 #define END_FOLD_INIT \
14663 flag_signaling_nans = saved_signaling_nans;\
14664 flag_trapping_math = saved_trapping_math;\
14665 flag_rounding_math = saved_rounding_math;\
14666 flag_trapv = saved_trapv;\
14667 folding_initializer = saved_folding_initializer;
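/* For example, with -frounding-math the division "1.0 / 3.0" is
normally left unfolded because its value depends on the dynamic
rounding mode; in a static initializer it must be evaluated at
compile time anyway, so the wrappers below clear
flag_rounding_math (and the other flags saved above) around the
fold.  */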
14669 tree
14670 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14671 tree type, tree op)
14673 tree result;
14674 START_FOLD_INIT;
14676 result = fold_build1_loc (loc, code, type, op);
14678 END_FOLD_INIT;
14679 return result;
14682 tree
14683 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14684 tree type, tree op0, tree op1)
14686 tree result;
14687 START_FOLD_INIT;
14689 result = fold_build2_loc (loc, code, type, op0, op1);
14691 END_FOLD_INIT;
14692 return result;
14695 tree
14696 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14697 tree type, tree op0, tree op1, tree op2)
14699 tree result;
14700 START_FOLD_INIT;
14702 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14704 END_FOLD_INIT;
14705 return result;
14708 tree
14709 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14710 int nargs, tree *argarray)
14712 tree result;
14713 START_FOLD_INIT;
14715 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14717 END_FOLD_INIT;
14718 return result;
14721 #undef START_FOLD_INIT
14722 #undef END_FOLD_INIT
14724 /* Determine if the first argument is a multiple of the second argument.
14725 Return 0 if it is not, or if we cannot easily determine it to be.
14727 An example of the sort of thing we care about (at this point; this routine
14728 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14729 fold cases do now) is discovering that
14731 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14733 is a multiple of
14735 SAVE_EXPR (J * 8)
14737 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14739 This code also handles discovering that
14741 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14743 is a multiple of 8 so we don't have to worry about dealing with a
14744 possible remainder.
14746 Note that we *look* inside a SAVE_EXPR only to determine how it was
14747 calculated; it is not safe for fold to do much of anything else with the
14748 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14749 at run time. For example, the latter example above *cannot* be implemented
14750 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14751 evaluation time of the original SAVE_EXPR is not necessarily the same at
14752 the time the new expression is evaluated. The only optimization of this
14753 sort that would be valid is changing
14755 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14757 divided by 8 to
14759 SAVE_EXPR (I) * SAVE_EXPR (J)
14761 (where the same SAVE_EXPR (J) is used in the original and the
14762 transformed version). */
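/* A short worked trace: multiple_of_p (type, J * 8, 4) takes the
MULT_EXPR case below and succeeds because the INTEGER_CST case
finds 8 % 4 == 0.  By contrast, for PLUS_EXPR and MINUS_EXPR
*both* operands must be multiples of BOTTOM, so I + J is only
known to be a multiple of 2 if that holds for I and for J.  */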
14764 int
14765 multiple_of_p (tree type, const_tree top, const_tree bottom)
14767 if (operand_equal_p (top, bottom, 0))
14768 return 1;
14770 if (TREE_CODE (type) != INTEGER_TYPE)
14771 return 0;
14773 switch (TREE_CODE (top))
14775 case BIT_AND_EXPR:
14776 /* Bitwise and provides a power of two multiple. If the mask is
14777 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14778 if (!integer_pow2p (bottom))
14779 return 0;
14780 /* FALLTHRU */
14782 case MULT_EXPR:
14783 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14784 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14786 case PLUS_EXPR:
14787 case MINUS_EXPR:
14788 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14789 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14791 case LSHIFT_EXPR:
14792 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14794 tree op1, t1;
14796 op1 = TREE_OPERAND (top, 1);
14797 /* const_binop may not detect overflow correctly,
14798 so check for it explicitly here. */
14799 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14800 > TREE_INT_CST_LOW (op1)
14801 && TREE_INT_CST_HIGH (op1) == 0
14802 && 0 != (t1 = fold_convert (type,
14803 const_binop (LSHIFT_EXPR,
14804 size_one_node,
14805 op1, 0)))
14806 && !TREE_OVERFLOW (t1))
14807 return multiple_of_p (type, t1, bottom);
14809 return 0;
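/* E.g. X << 3 is recognized as a multiple of 4: t1 is computed
as 1 << 3 == 8, and the recursive call reduces to the
INTEGER_CST case with 8 % 4 == 0.  */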
14811 case NOP_EXPR:
14812 /* Can't handle conversions from non-integral or wider integral type. */
14813 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14814 || (TYPE_PRECISION (type)
14815 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14816 return 0;
14818 /* .. fall through ... */
14820 case SAVE_EXPR:
14821 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14823 case INTEGER_CST:
14824 if (TREE_CODE (bottom) != INTEGER_CST
14825 || integer_zerop (bottom)
14826 || (TYPE_UNSIGNED (type)
14827 && (tree_int_cst_sgn (top) < 0
14828 || tree_int_cst_sgn (bottom) < 0)))
14829 return 0;
14830 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14831 top, bottom, 0));
14833 default:
14834 return 0;
14838 /* Return true if CODE or TYPE is known to be non-negative. */
14840 static bool
14841 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14843 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14844 && truth_value_p (code))
14845 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14846 have a signed:1 type (where the values are -1 and 0). */
14847 return true;
14848 return false;
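/* The signed:1 exception is real: e.g. a bit-field declared
"signed int f : 1" can hold only the values -1 and 0, so a
truth-valued expression of that type may be negative.  */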
14851 /* Return true if (CODE OP0) is known to be non-negative. If the return
14852 value is based on the assumption that signed overflow is undefined,
14853 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14854 *STRICT_OVERFLOW_P. */
14856 bool
14857 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14858 bool *strict_overflow_p)
14860 if (TYPE_UNSIGNED (type))
14861 return true;
14863 switch (code)
14865 case ABS_EXPR:
14866 /* We can't return 1 if flag_wrapv is set because
14867 ABS_EXPR<INT_MIN> = INT_MIN. */
14868 if (!INTEGRAL_TYPE_P (type))
14869 return true;
14870 if (TYPE_OVERFLOW_UNDEFINED (type))
14872 *strict_overflow_p = true;
14873 return true;
14875 break;
14877 case NON_LVALUE_EXPR:
14878 case FLOAT_EXPR:
14879 case FIX_TRUNC_EXPR:
14880 return tree_expr_nonnegative_warnv_p (op0,
14881 strict_overflow_p);
14883 case NOP_EXPR:
14885 tree inner_type = TREE_TYPE (op0);
14886 tree outer_type = type;
14888 if (TREE_CODE (outer_type) == REAL_TYPE)
14890 if (TREE_CODE (inner_type) == REAL_TYPE)
14891 return tree_expr_nonnegative_warnv_p (op0,
14892 strict_overflow_p);
14893 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14895 if (TYPE_UNSIGNED (inner_type))
14896 return true;
14897 return tree_expr_nonnegative_warnv_p (op0,
14898 strict_overflow_p);
14901 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14903 if (TREE_CODE (inner_type) == REAL_TYPE)
14904 return tree_expr_nonnegative_warnv_p (op0,
14905 strict_overflow_p);
14906 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14907 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14908 && TYPE_UNSIGNED (inner_type);
14911 break;
14913 default:
14914 return tree_simple_nonnegative_warnv_p (code, type);
14917 /* We don't know sign of `t', so be conservative and return false. */
14918 return false;
14921 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14922 value is based on the assumption that signed overflow is undefined,
14923 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14924 *STRICT_OVERFLOW_P. */
14926 bool
14927 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14928 tree op1, bool *strict_overflow_p)
14930 if (TYPE_UNSIGNED (type))
14931 return true;
14933 switch (code)
14935 case POINTER_PLUS_EXPR:
14936 case PLUS_EXPR:
14937 if (FLOAT_TYPE_P (type))
14938 return (tree_expr_nonnegative_warnv_p (op0,
14939 strict_overflow_p)
14940 && tree_expr_nonnegative_warnv_p (op1,
14941 strict_overflow_p));
14943 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14944 both unsigned and at least 2 bits shorter than the result. */
14945 if (TREE_CODE (type) == INTEGER_TYPE
14946 && TREE_CODE (op0) == NOP_EXPR
14947 && TREE_CODE (op1) == NOP_EXPR)
14949 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14950 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14951 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14952 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14954 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14955 TYPE_PRECISION (inner2)) + 1;
14956 return prec < TYPE_PRECISION (type);
14959 break;
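/* E.g. (int) uc1 + (int) uc2 for unsigned chars: both inner
precisions are 8, so prec == 9 < 32.  Indeed the sum is at
most 255 + 255 == 510, far below the sign bit.  */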
14961 case MULT_EXPR:
14962 if (FLOAT_TYPE_P (type))
14964 /* x * x for floating point x is always non-negative. */
14965 if (operand_equal_p (op0, op1, 0))
14966 return true;
14967 return (tree_expr_nonnegative_warnv_p (op0,
14968 strict_overflow_p)
14969 && tree_expr_nonnegative_warnv_p (op1,
14970 strict_overflow_p));
14973 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14974 both unsigned and their total bits is shorter than the result. */
14975 if (TREE_CODE (type) == INTEGER_TYPE
14976 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14977 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14979 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14980 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14981 : TREE_TYPE (op0);
14982 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14983 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14984 : TREE_TYPE (op1);
14986 bool unsigned0 = TYPE_UNSIGNED (inner0);
14987 bool unsigned1 = TYPE_UNSIGNED (inner1);
14989 if (TREE_CODE (op0) == INTEGER_CST)
14990 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14992 if (TREE_CODE (op1) == INTEGER_CST)
14993 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14995 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14996 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14998 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14999 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15000 : TYPE_PRECISION (inner0);
15002 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15003 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15004 : TYPE_PRECISION (inner1);
15006 return precision0 + precision1 < TYPE_PRECISION (type);
15009 return false;
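/* E.g. (int) uc1 * (int) uc2: 8 + 8 == 16 < 32, matching the
largest possible product 255 * 255 == 65025, which fits in 16
bits.  A constant operand tightens its bound: uc * 5 needs
only 8 + 3 == 11 bits.  */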
15011 case BIT_AND_EXPR:
15012 case MAX_EXPR:
15013 return (tree_expr_nonnegative_warnv_p (op0,
15014 strict_overflow_p)
15015 || tree_expr_nonnegative_warnv_p (op1,
15016 strict_overflow_p));
15018 case BIT_IOR_EXPR:
15019 case BIT_XOR_EXPR:
15020 case MIN_EXPR:
15021 case RDIV_EXPR:
15022 case TRUNC_DIV_EXPR:
15023 case CEIL_DIV_EXPR:
15024 case FLOOR_DIV_EXPR:
15025 case ROUND_DIV_EXPR:
15026 return (tree_expr_nonnegative_warnv_p (op0,
15027 strict_overflow_p)
15028 && tree_expr_nonnegative_warnv_p (op1,
15029 strict_overflow_p));
15031 case TRUNC_MOD_EXPR:
15032 case CEIL_MOD_EXPR:
15033 case FLOOR_MOD_EXPR:
15034 case ROUND_MOD_EXPR:
15035 return tree_expr_nonnegative_warnv_p (op0,
15036 strict_overflow_p);
15037 default:
15038 return tree_simple_nonnegative_warnv_p (code, type);
15041 /* We don't know sign of `t', so be conservative and return false. */
15042 return false;
15045 /* Return true if T is known to be non-negative. If the return
15046 value is based on the assumption that signed overflow is undefined,
15047 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15048 *STRICT_OVERFLOW_P. */
15050 bool
15051 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15053 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15054 return true;
15056 switch (TREE_CODE (t))
15058 case INTEGER_CST:
15059 return tree_int_cst_sgn (t) >= 0;
15061 case REAL_CST:
15062 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15064 case FIXED_CST:
15065 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15067 case COND_EXPR:
15068 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15069 strict_overflow_p)
15070 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15071 strict_overflow_p));
15072 default:
15073 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15074 TREE_TYPE (t));
15076 /* We don't know sign of `t', so be conservative and return false. */
15077 return false;
15080 /* Return true if T is known to be non-negative. If the return
15081 value is based on the assumption that signed overflow is undefined,
15082 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15083 *STRICT_OVERFLOW_P. */
15085 bool
15086 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15087 tree arg0, tree arg1, bool *strict_overflow_p)
15089 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15090 switch (DECL_FUNCTION_CODE (fndecl))
15092 CASE_FLT_FN (BUILT_IN_ACOS):
15093 CASE_FLT_FN (BUILT_IN_ACOSH):
15094 CASE_FLT_FN (BUILT_IN_CABS):
15095 CASE_FLT_FN (BUILT_IN_COSH):
15096 CASE_FLT_FN (BUILT_IN_ERFC):
15097 CASE_FLT_FN (BUILT_IN_EXP):
15098 CASE_FLT_FN (BUILT_IN_EXP10):
15099 CASE_FLT_FN (BUILT_IN_EXP2):
15100 CASE_FLT_FN (BUILT_IN_FABS):
15101 CASE_FLT_FN (BUILT_IN_FDIM):
15102 CASE_FLT_FN (BUILT_IN_HYPOT):
15103 CASE_FLT_FN (BUILT_IN_POW10):
15104 CASE_INT_FN (BUILT_IN_FFS):
15105 CASE_INT_FN (BUILT_IN_PARITY):
15106 CASE_INT_FN (BUILT_IN_POPCOUNT):
15107 case BUILT_IN_BSWAP32:
15108 case BUILT_IN_BSWAP64:
15109 /* Always true. */
15110 return true;
15112 CASE_FLT_FN (BUILT_IN_SQRT):
15113 /* sqrt(-0.0) is -0.0. */
15114 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15115 return true;
15116 return tree_expr_nonnegative_warnv_p (arg0,
15117 strict_overflow_p);
15119 CASE_FLT_FN (BUILT_IN_ASINH):
15120 CASE_FLT_FN (BUILT_IN_ATAN):
15121 CASE_FLT_FN (BUILT_IN_ATANH):
15122 CASE_FLT_FN (BUILT_IN_CBRT):
15123 CASE_FLT_FN (BUILT_IN_CEIL):
15124 CASE_FLT_FN (BUILT_IN_ERF):
15125 CASE_FLT_FN (BUILT_IN_EXPM1):
15126 CASE_FLT_FN (BUILT_IN_FLOOR):
15127 CASE_FLT_FN (BUILT_IN_FMOD):
15128 CASE_FLT_FN (BUILT_IN_FREXP):
15129 CASE_FLT_FN (BUILT_IN_LCEIL):
15130 CASE_FLT_FN (BUILT_IN_LDEXP):
15131 CASE_FLT_FN (BUILT_IN_LFLOOR):
15132 CASE_FLT_FN (BUILT_IN_LLCEIL):
15133 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15134 CASE_FLT_FN (BUILT_IN_LLRINT):
15135 CASE_FLT_FN (BUILT_IN_LLROUND):
15136 CASE_FLT_FN (BUILT_IN_LRINT):
15137 CASE_FLT_FN (BUILT_IN_LROUND):
15138 CASE_FLT_FN (BUILT_IN_MODF):
15139 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15140 CASE_FLT_FN (BUILT_IN_RINT):
15141 CASE_FLT_FN (BUILT_IN_ROUND):
15142 CASE_FLT_FN (BUILT_IN_SCALB):
15143 CASE_FLT_FN (BUILT_IN_SCALBLN):
15144 CASE_FLT_FN (BUILT_IN_SCALBN):
15145 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15146 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15147 CASE_FLT_FN (BUILT_IN_SINH):
15148 CASE_FLT_FN (BUILT_IN_TANH):
15149 CASE_FLT_FN (BUILT_IN_TRUNC):
15150 /* True if the 1st argument is nonnegative. */
15151 return tree_expr_nonnegative_warnv_p (arg0,
15152 strict_overflow_p);
15154 CASE_FLT_FN (BUILT_IN_FMAX):
15155 /* True if the 1st OR 2nd arguments are nonnegative. */
15156 return (tree_expr_nonnegative_warnv_p (arg0,
15157 strict_overflow_p)
15158 || (tree_expr_nonnegative_warnv_p (arg1,
15159 strict_overflow_p)));
15161 CASE_FLT_FN (BUILT_IN_FMIN):
15162 /* True if the 1st AND 2nd arguments are nonnegative. */
15163 return (tree_expr_nonnegative_warnv_p (arg0,
15164 strict_overflow_p)
15165 && (tree_expr_nonnegative_warnv_p (arg1,
15166 strict_overflow_p)));
15168 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15169 /* True if the 2nd argument is nonnegative. */
15170 return tree_expr_nonnegative_warnv_p (arg1,
15171 strict_overflow_p);
15173 CASE_FLT_FN (BUILT_IN_POWI):
15174 /* True if the 1st argument is nonnegative or the second
15175 argument is an even integer. */
15176 if (TREE_CODE (arg1) == INTEGER_CST
15177 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15178 return true;
15179 return tree_expr_nonnegative_warnv_p (arg0,
15180 strict_overflow_p);
15182 CASE_FLT_FN (BUILT_IN_POW):
15183 /* True if the 1st argument is nonnegative or the second
15184 argument is an even integer valued real. */
15185 if (TREE_CODE (arg1) == REAL_CST)
15187 REAL_VALUE_TYPE c;
15188 HOST_WIDE_INT n;
15190 c = TREE_REAL_CST (arg1);
15191 n = real_to_integer (&c);
15192 if ((n & 1) == 0)
15194 REAL_VALUE_TYPE cint;
15195 real_from_integer (&cint, VOIDmode, n,
15196 n < 0 ? -1 : 0, 0);
15197 if (real_identical (&c, &cint))
15198 return true;
15201 return tree_expr_nonnegative_warnv_p (arg0,
15202 strict_overflow_p);
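/* E.g. pow (x, 2.0) is known nonnegative for any x, since the
exponent is an even integer-valued real; pow (x, 3.0) is known
nonnegative only when x itself is.  */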
15204 default:
15205 break;
15207 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15208 type);
15211 /* Return true if T is known to be non-negative. If the return
15212 value is based on the assumption that signed overflow is undefined,
15213 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15214 *STRICT_OVERFLOW_P. */
15216 bool
15217 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15219 enum tree_code code = TREE_CODE (t);
15220 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15221 return true;
15223 switch (code)
15225 case TARGET_EXPR:
15227 tree temp = TARGET_EXPR_SLOT (t);
15228 t = TARGET_EXPR_INITIAL (t);
15230 /* If the initializer is non-void, then it's a normal expression
15231 that will be assigned to the slot. */
15232 if (!VOID_TYPE_P (t))
15233 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15235 /* Otherwise, the initializer sets the slot in some way. One common
15236 way is an assignment statement at the end of the initializer. */
15237 while (1)
15239 if (TREE_CODE (t) == BIND_EXPR)
15240 t = expr_last (BIND_EXPR_BODY (t));
15241 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15242 || TREE_CODE (t) == TRY_CATCH_EXPR)
15243 t = expr_last (TREE_OPERAND (t, 0));
15244 else if (TREE_CODE (t) == STATEMENT_LIST)
15245 t = expr_last (t);
15246 else
15247 break;
15249 if (TREE_CODE (t) == MODIFY_EXPR
15250 && TREE_OPERAND (t, 0) == temp)
15251 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15252 strict_overflow_p);
15254 return false;
15257 case CALL_EXPR:
15259 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15260 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15262 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15263 get_callee_fndecl (t),
15264 arg0,
15265 arg1,
15266 strict_overflow_p);
15268 case COMPOUND_EXPR:
15269 case MODIFY_EXPR:
15270 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15271 strict_overflow_p);
15272 case BIND_EXPR:
15273 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15274 strict_overflow_p);
15275 case SAVE_EXPR:
15276 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15277 strict_overflow_p);
15279 default:
15280 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15281 TREE_TYPE (t));
15284 /* We don't know sign of `t', so be conservative and return false. */
15285 return false;
15288 /* Return true if T is known to be non-negative. If the return
15289 value is based on the assumption that signed overflow is undefined,
15290 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15291 *STRICT_OVERFLOW_P. */
15293 bool
15294 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15296 enum tree_code code;
15297 if (t == error_mark_node)
15298 return false;
15300 code = TREE_CODE (t);
15301 switch (TREE_CODE_CLASS (code))
15303 case tcc_binary:
15304 case tcc_comparison:
15305 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15306 TREE_TYPE (t),
15307 TREE_OPERAND (t, 0),
15308 TREE_OPERAND (t, 1),
15309 strict_overflow_p);
15311 case tcc_unary:
15312 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15313 TREE_TYPE (t),
15314 TREE_OPERAND (t, 0),
15315 strict_overflow_p);
15317 case tcc_constant:
15318 case tcc_declaration:
15319 case tcc_reference:
15320 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15322 default:
15323 break;
15326 switch (code)
15328 case TRUTH_AND_EXPR:
15329 case TRUTH_OR_EXPR:
15330 case TRUTH_XOR_EXPR:
15331 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15332 TREE_TYPE (t),
15333 TREE_OPERAND (t, 0),
15334 TREE_OPERAND (t, 1),
15335 strict_overflow_p);
15336 case TRUTH_NOT_EXPR:
15337 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15338 TREE_TYPE (t),
15339 TREE_OPERAND (t, 0),
15340 strict_overflow_p);
15342 case COND_EXPR:
15343 case CONSTRUCTOR:
15344 case OBJ_TYPE_REF:
15345 case ASSERT_EXPR:
15346 case ADDR_EXPR:
15347 case WITH_SIZE_EXPR:
15348 case SSA_NAME:
15349 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15351 default:
15352 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15356 /* Return true if `t' is known to be non-negative. Handle warnings
15357 about undefined signed overflow. */
15359 bool
15360 tree_expr_nonnegative_p (tree t)
15362 bool ret, strict_overflow_p;
15364 strict_overflow_p = false;
15365 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15366 if (strict_overflow_p)
15367 fold_overflow_warning (("assuming signed overflow does not occur when "
15368 "determining that expression is always "
15369 "non-negative"),
15370 WARN_STRICT_OVERFLOW_MISC);
15371 return ret;
15375 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15376 For floating point we further ensure that T is not denormal.
15377 Similar logic is present in nonzero_address in rtlanal.c.
15379 If the return value is based on the assumption that signed overflow
15380 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15381 change *STRICT_OVERFLOW_P. */
15383 bool
15384 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15385 bool *strict_overflow_p)
15387 switch (code)
15389 case ABS_EXPR:
15390 return tree_expr_nonzero_warnv_p (op0,
15391 strict_overflow_p);
15393 case NOP_EXPR:
15395 tree inner_type = TREE_TYPE (op0);
15396 tree outer_type = type;
15398 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15399 && tree_expr_nonzero_warnv_p (op0,
15400 strict_overflow_p));
15402 break;
15404 case NON_LVALUE_EXPR:
15405 return tree_expr_nonzero_warnv_p (op0,
15406 strict_overflow_p);
15408 default:
15409 break;
15412 return false;
15415 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15416 For floating point we further ensure that T is not denormal.
15417 Similar logic is present in nonzero_address in rtlanal.c.
15419 If the return value is based on the assumption that signed overflow
15420 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15421 change *STRICT_OVERFLOW_P. */
15423 bool
15424 tree_binary_nonzero_warnv_p (enum tree_code code,
15425 tree type,
15426 tree op0,
15427 tree op1, bool *strict_overflow_p)
15429 bool sub_strict_overflow_p;
15430 switch (code)
15432 case POINTER_PLUS_EXPR:
15433 case PLUS_EXPR:
15434 if (TYPE_OVERFLOW_UNDEFINED (type))
15436 /* With the presence of negative values it is hard
15437 to say something. */
15438 sub_strict_overflow_p = false;
15439 if (!tree_expr_nonnegative_warnv_p (op0,
15440 &sub_strict_overflow_p)
15441 || !tree_expr_nonnegative_warnv_p (op1,
15442 &sub_strict_overflow_p))
15443 return false;
15444 /* One of the operands must be positive and the other non-negative. */
15445 /* We don't set *STRICT_OVERFLOW_P here: even if this value overflows,
15446 on a twos-complement machine the sum of two nonnegative numbers, one of
15447 them nonzero, can never wrap to zero: each is at most 2^(n-1)-1, so the sum is at most 2^n-2. */
15448 return (tree_expr_nonzero_warnv_p (op0,
15449 strict_overflow_p)
15450 || tree_expr_nonzero_warnv_p (op1,
15451 strict_overflow_p));
15453 break;
15455 case MULT_EXPR:
15456 if (TYPE_OVERFLOW_UNDEFINED (type))
15458 if (tree_expr_nonzero_warnv_p (op0,
15459 strict_overflow_p)
15460 && tree_expr_nonzero_warnv_p (op1,
15461 strict_overflow_p))
15463 *strict_overflow_p = true;
15464 return true;
15467 break;
15469 case MIN_EXPR:
15470 sub_strict_overflow_p = false;
15471 if (tree_expr_nonzero_warnv_p (op0,
15472 &sub_strict_overflow_p)
15473 && tree_expr_nonzero_warnv_p (op1,
15474 &sub_strict_overflow_p))
15476 if (sub_strict_overflow_p)
15477 *strict_overflow_p = true;
15479 break;
15481 case MAX_EXPR:
15482 sub_strict_overflow_p = false;
15483 if (tree_expr_nonzero_warnv_p (op0,
15484 &sub_strict_overflow_p))
15486 if (sub_strict_overflow_p)
15487 *strict_overflow_p = true;
15489 /* When both operands are nonzero, then MAX must be too. */
15490 if (tree_expr_nonzero_warnv_p (op1,
15491 strict_overflow_p))
15492 return true;
15494 /* MAX where operand 0 is positive is positive. */
15495 return tree_expr_nonnegative_warnv_p (op0,
15496 strict_overflow_p);
15498 /* MAX where operand 1 is positive is positive. */
15499 else if (tree_expr_nonzero_warnv_p (op1,
15500 &sub_strict_overflow_p)
15501 && tree_expr_nonnegative_warnv_p (op1,
15502 &sub_strict_overflow_p))
15504 if (sub_strict_overflow_p)
15505 *strict_overflow_p = true;
15506 return true;
15508 break;
15510 case BIT_IOR_EXPR:
15511 return (tree_expr_nonzero_warnv_p (op1,
15512 strict_overflow_p)
15513 || tree_expr_nonzero_warnv_p (op0,
15514 strict_overflow_p));
15516 default:
15517 break;
15520 return false;
15523 /* Return true when T is an address and is known to be nonzero.
15524 For floating point we further ensure that T is not denormal.
15525 Similar logic is present in nonzero_address in rtlanal.c.
15527 If the return value is based on the assumption that signed overflow
15528 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15529 change *STRICT_OVERFLOW_P. */
15531 bool
15532 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15534 bool sub_strict_overflow_p;
15535 switch (TREE_CODE (t))
15537 case INTEGER_CST:
15538 return !integer_zerop (t);
15540 case ADDR_EXPR:
15542 tree base = get_base_address (TREE_OPERAND (t, 0));
15544 if (!base)
15545 return false;
15547 /* Weak declarations may link to NULL. Other things may also be NULL
15548 so protect with -fdelete-null-pointer-checks; but not variables
15549 allocated on the stack. */
15550 if (DECL_P (base)
15551 && (flag_delete_null_pointer_checks
15552 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15553 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15555 /* Constants are never weak. */
15556 if (CONSTANT_CLASS_P (base))
15557 return true;
15559 return false;
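/* E.g. the address of a local variable or of a string literal is
always nonzero, whereas for "extern int w __attribute__ ((weak));"
the address &w may well be null if the weak symbol remains
unresolved at link time.  */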
15562 case COND_EXPR:
15563 sub_strict_overflow_p = false;
15564 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15565 &sub_strict_overflow_p)
15566 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15567 &sub_strict_overflow_p))
15569 if (sub_strict_overflow_p)
15570 *strict_overflow_p = true;
15571 return true;
15573 break;
15575 default:
15576 break;
15578 return false;
15581 /* Return true when T is an address and is known to be nonzero.
15582 For floating point we further ensure that T is not denormal.
15583 Similar logic is present in nonzero_address in rtlanal.c.
15585 If the return value is based on the assumption that signed overflow
15586 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15587 change *STRICT_OVERFLOW_P. */
15589 bool
15590 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15592 tree type = TREE_TYPE (t);
15593 enum tree_code code;
15595 /* Doing something useful for floating point would need more work. */
15596 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15597 return false;
15599 code = TREE_CODE (t);
15600 switch (TREE_CODE_CLASS (code))
15602 case tcc_unary:
15603 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15604 strict_overflow_p);
15605 case tcc_binary:
15606 case tcc_comparison:
15607 return tree_binary_nonzero_warnv_p (code, type,
15608 TREE_OPERAND (t, 0),
15609 TREE_OPERAND (t, 1),
15610 strict_overflow_p);
15611 case tcc_constant:
15612 case tcc_declaration:
15613 case tcc_reference:
15614 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15616 default:
15617 break;
15620 switch (code)
15622 case TRUTH_NOT_EXPR:
15623 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15624 strict_overflow_p);
15626 case TRUTH_AND_EXPR:
15627 case TRUTH_OR_EXPR:
15628 case TRUTH_XOR_EXPR:
15629 return tree_binary_nonzero_warnv_p (code, type,
15630 TREE_OPERAND (t, 0),
15631 TREE_OPERAND (t, 1),
15632 strict_overflow_p);
15634 case COND_EXPR:
15635 case CONSTRUCTOR:
15636 case OBJ_TYPE_REF:
15637 case ASSERT_EXPR:
15638 case ADDR_EXPR:
15639 case WITH_SIZE_EXPR:
15640 case SSA_NAME:
15641 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15643 case COMPOUND_EXPR:
15644 case MODIFY_EXPR:
15645 case BIND_EXPR:
15646 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15647 strict_overflow_p);
15649 case SAVE_EXPR:
15650 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15651 strict_overflow_p);
15653 case CALL_EXPR:
15654 return alloca_call_p (t);
15656 default:
15657 break;
15659 return false;
15662 /* Return true when T is an address and is known to be nonzero.
15663 Handle warnings about undefined signed overflow. */
15665 bool
15666 tree_expr_nonzero_p (tree t)
15668 bool ret, strict_overflow_p;
15670 strict_overflow_p = false;
15671 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15672 if (strict_overflow_p)
15673 fold_overflow_warning (("assuming signed overflow does not occur when "
15674 "determining that expression is always "
15675 "non-zero"),
15676 WARN_STRICT_OVERFLOW_MISC);
15677 return ret;
15680 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15681 attempt to fold the expression to a constant without modifying TYPE,
15682 OP0 or OP1.
15684 If the expression can be simplified to a constant, then return
15685 the constant. If the expression cannot be simplified to a
15686 constant, then return NULL_TREE. */
15688 tree
15689 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15691 tree tem = fold_binary (code, type, op0, op1);
15692 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
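/* E.g. folding 2 + 3 here yields the INTEGER_CST 5, whereas
x + 0 simplifies to the non-constant x, so NULL_TREE is
returned for it.  */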
15695 /* Given the components of a unary expression CODE, TYPE and OP0,
15696 attempt to fold the expression to a constant without modifying
15697 TYPE or OP0.
15699 If the expression can be simplified to a constant, then return
15700 the constant. If the expression cannot be simplified to a
15701 constant, then return NULL_TREE. */
15703 tree
15704 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15706 tree tem = fold_unary (code, type, op0);
15707 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15710 /* If EXP represents referencing an element in a constant string
15711 (either via pointer arithmetic or array indexing), return the
15712 tree representing the value accessed, otherwise return NULL. */
15714 tree
15715 fold_read_from_constant_string (tree exp)
15717 if ((TREE_CODE (exp) == INDIRECT_REF
15718 || TREE_CODE (exp) == ARRAY_REF)
15719 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15721 tree exp1 = TREE_OPERAND (exp, 0);
15722 tree index;
15723 tree string;
15724 location_t loc = EXPR_LOCATION (exp);
15726 if (TREE_CODE (exp) == INDIRECT_REF)
15727 string = string_constant (exp1, &index);
15728 else
15730 tree low_bound = array_ref_low_bound (exp);
15731 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15733 /* Optimize the special-case of a zero lower bound.
15735 We convert the low_bound to sizetype to avoid some problems
15736 with constant folding. (E.g. suppose the lower bound is 1,
15737 and its mode is QI. Without the conversion, (ARRAY
15738 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15739 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15740 if (! integer_zerop (low_bound))
15741 index = size_diffop_loc (loc, index,
15742 fold_convert_loc (loc, sizetype, low_bound));
15744 string = exp1;
15747 if (string
15748 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15749 && TREE_CODE (string) == STRING_CST
15750 && TREE_CODE (index) == INTEGER_CST
15751 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15752 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15753 == MODE_INT)
15754 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15755 return build_int_cst_type (TREE_TYPE (exp),
15756 (TREE_STRING_POINTER (string)
15757 [TREE_INT_CST_LOW (index)]));
15759 return NULL;
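/* E.g. both "abc"[1] and *("abc" + 1) pass the checks above and
fold to the character constant 'b'.  */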
15762 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15763 an integer constant, real, or fixed-point constant.
15765 TYPE is the type of the result. */
15767 static tree
15768 fold_negate_const (tree arg0, tree type)
15770 tree t = NULL_TREE;
15772 switch (TREE_CODE (arg0))
15774 case INTEGER_CST:
15776 unsigned HOST_WIDE_INT low;
15777 HOST_WIDE_INT high;
15778 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15779 TREE_INT_CST_HIGH (arg0),
15780 &low, &high);
15781 t = force_fit_type_double (type, low, high, 1,
15782 (overflow | TREE_OVERFLOW (arg0))
15783 && !TYPE_UNSIGNED (type));
15784 break;
15787 case REAL_CST:
15788 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15789 break;
15791 case FIXED_CST:
15793 FIXED_VALUE_TYPE f;
15794 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15795 &(TREE_FIXED_CST (arg0)), NULL,
15796 TYPE_SATURATING (type));
15797 t = build_fixed (type, f);
15798 /* Propagate overflow flags. */
15799 if (overflow_p | TREE_OVERFLOW (arg0))
15800 TREE_OVERFLOW (t) = 1;
15801 break;
15804 default:
15805 gcc_unreachable ();
15808 return t;
15811 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15812 an integer constant or real constant.
15814 TYPE is the type of the result. */
15816 tree
15817 fold_abs_const (tree arg0, tree type)
15819 tree t = NULL_TREE;
15821 switch (TREE_CODE (arg0))
15823 case INTEGER_CST:
15824 /* If the value is unsigned, then the absolute value is
15825 the same as the ordinary value. */
15826 if (TYPE_UNSIGNED (type))
15827 t = arg0;
15828 /* Similarly, if the value is non-negative. */
15829 else if (INT_CST_LT (integer_minus_one_node, arg0))
15830 t = arg0;
15831 /* If the value is negative, then the absolute value is
15832 its negation. */
15833 else
15835 unsigned HOST_WIDE_INT low;
15836 HOST_WIDE_INT high;
15837 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15838 TREE_INT_CST_HIGH (arg0),
15839 &low, &high);
15840 t = force_fit_type_double (type, low, high, -1,
15841 overflow | TREE_OVERFLOW (arg0));
15843 break;
15845 case REAL_CST:
15846 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15847 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15848 else
15849 t = arg0;
15850 break;
15852 default:
15853 gcc_unreachable ();
15856 return t;
15859 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15860 constant. TYPE is the type of the result. */
15862 static tree
15863 fold_not_const (tree arg0, tree type)
15865 tree t = NULL_TREE;
15867 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15869 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15870 ~TREE_INT_CST_HIGH (arg0), 0,
15871 TREE_OVERFLOW (arg0));
15873 return t;

/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case in which it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
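
/* Worked example of the reduction above (editor's note): folding
   3 >= 5 on INTEGER_CSTs turns GE into LT with the result inverted;
   INT_CST_LT (3, 5) is 1, so the inverted result is 0 and
   constant_boolean_node yields false.  Inverting is safe here only
   because the NaN cases were peeled off earlier.  */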

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the MODIFY_EXPR inside the return,
     has side effects.  If either of them has no side effects, we don't
     need to wrap the expression in a cleanup point expression.  Note that
     we don't check the left-hand side of the MODIFY_EXPR because it should
     always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
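
/* Illustrative note (editor's sketch; the exact trees are front-end
   dependent): a side-effect-free `return retval;' comes back
   unchanged, while something like `return retval = f ();' is wrapped
   as CLEANUP_POINT_EXPR <RETURN_EXPR <MODIFY_EXPR <retval, f ()>>>,
   so that temporaries created while evaluating the call are cleaned
   up before control leaves the statement.  */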

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
          SET_EXPR_LOCATION (op0, loc);
          return op0;
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
                                  index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          /* Valid element indices are 0 .. TYPE_VECTOR_SUBPARTS - 1,
             so the bounds check must be strict.  */
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3_loc (loc,
                                    BIT_FIELD_REF, type,
                                    TREE_OPERAND (op00, 0),
                                    part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1_loc (loc, IMAGPART_EXPR, type,
                                    TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}
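
/* Worked example for the vector POINTER_PLUS_EXPR case above
   (editor's note, hypothetical operands): with a 16-byte vector `v'
   of four 32-bit ints, dereferencing a POINTER_PLUS_EXPR that adds
   8 bytes to (int *) &v gives offset == 8, part_widthi == 4 and
   indexi == 64; since 8 / 4 == 2 is a valid element index (2 < 4),
   the result is BIT_FIELD_REF <v, 32, 64>, i.e. element number 2.  */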

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
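
/* Worked example (editor's note, hypothetical expressions): for an
   ignored `f () + 3', the addend has no side effects, so the loop
   steps to `f ()' and returns it; a completely side-effect-free
   expression is replaced by integer_zero_node up front.  */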

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only perform this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
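
/* Editor's sketch of the power-of-two identity used above, on plain
   host integers (illustrative only; `round_up_hwi' is hypothetical
   and not part of GCC's interface): for a power of two D,
   -D == ~(D - 1), so adding D - 1 and masking rounds up.
   E.g., round_up_hwi (13, 8) == (13 + 7) & ~7 == 16.  */

static inline unsigned HOST_WIDE_INT
round_up_hwi (unsigned HOST_WIDE_INT x, unsigned HOST_WIDE_INT d)
{
  /* Assumes D is a power of two.  Wraps silently modulo the word
     size; round_up_loc above additionally records such wraparound
     via TREE_OVERFLOW.  */
  return (x + d - 1) & -d;
}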

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only perform this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
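
/* Worked example (editor's note): rounding 13 down to a multiple of 8
   computes 13 & -8; since -8 == ~7 this clears the low three bits,
   giving 8.  For a non-power-of-two divisor such as 3, the
   FLOOR_DIV/MULT path yields (13 / 3) * 3 == 12.  */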

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
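
/* Worked example (editor's note, hypothetical declarations): for
   `&s.f', where field `f' lives at byte offset 4 of `s', the core is
   `&s', *PBITPOS is 32 and *POFFSET is NULL_TREE.  A pointer that is
   not an ADDR_EXPR is returned as its own core with a zero offset.  */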

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
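
/* Editor's usage sketch (hypothetical caller, illustrative only):

     HOST_WIDE_INT delta;
     if (ptr_difference_const (addr1, addr2, &delta))
       ... addr1 == addr2 + delta bytes at compile time ...

   E.g., for a hypothetical `int a[10]' on a target with 4-byte int,
   &a[3] and &a[1] share the core `&a' with bit positions 96 and 32,
   so DELTA is (96 - 32) / 8 == 8.  */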

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0,
                                arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
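
/* Worked examples (editor's note, hypothetical operands): when only
   the magnitude of the result matters and the mode does not honor
   sign-dependent rounding, `-x * y' strips to `x * y',
   `copysign (x, y)' strips to `x' (keeping Y via omit_one_operand_loc
   in case it has side effects), and `sin (-x)' strips to `sin (x)'
   because negate_mathfn_p classifies sin as odd.  */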