gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "target.h"
62 #include "toplev.h"
63 #include "intl.h"
64 #include "ggc.h"
65 #include "hashtab.h"
66 #include "langhooks.h"
67 #include "md5.h"
68 #include "gimple.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
71 otherwise. */
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
78 COMPCODE_FALSE = 0,
79 COMPCODE_LT = 1,
80 COMPCODE_EQ = 2,
81 COMPCODE_LE = 3,
82 COMPCODE_GT = 4,
83 COMPCODE_LTGT = 5,
84 COMPCODE_GE = 6,
85 COMPCODE_ORD = 7,
86 COMPCODE_UNORD = 8,
87 COMPCODE_UNLT = 9,
88 COMPCODE_UNEQ = 10,
89 COMPCODE_UNLE = 11,
90 COMPCODE_UNGT = 12,
91 COMPCODE_NE = 13,
92 COMPCODE_UNGE = 14,
93 COMPCODE_TRUE = 15
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand (tree, tree, tree);
110 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
112 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
115 tree *, tree *);
116 static int all_ones_mask_p (const_tree, int);
117 static tree sign_bit_p (tree, const_tree);
118 static int simple_operand_p (const_tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *, bool *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
125 tree);
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
137 tree, tree, tree);
138 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
139 static tree fold_div_compare (enum tree_code, tree, tree, tree);
140 static bool reorder_operands_p (const_tree, const_tree);
141 static tree fold_negate_const (tree, tree);
142 static tree fold_not_const (tree, tree);
143 static tree fold_relational_const (enum tree_code, tree, tree, tree);
144 static tree fold_convert_const (enum tree_code, tree, tree);
147 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
148 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
149 and SUM1. Then this yields nonzero if overflow occurred during the
150 addition.
152 Overflow occurs if A and B have the same sign, but A and SUM differ in
153 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
154 sign. */
155 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
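/* Illustrative sketch, not part of the original file: exercising the
   macro with plain ints on a two's complement host.  ~(a ^ b) is
   negative iff A and B agree in sign, and (a ^ sum) is negative iff A
   and SUM disagree, so the AND is negative exactly when overflow
   occurred.  Kept out of the build with #if 0.  */
#if 0
#include <limits.h>
#include <assert.h>

static void
overflow_sum_sign_demo (void)
{
  int a = INT_MAX, b = 1;
  int sum = (int) ((unsigned) a + (unsigned) b);  /* wraps to INT_MIN */
  assert (OVERFLOW_SUM_SIGN (a, b, sum));   /* same signs, sign flipped */
  assert (!OVERFLOW_SUM_SIGN (1, -1, 0));   /* mixed signs never overflow */
}
#endif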
157 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
158 We do that by representing the two-word integer in 4 words, with only
159 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
160 number. The value of the word is LOWPART + HIGHPART * BASE. */
162 #define LOWPART(x) \
163 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
164 #define HIGHPART(x) \
165 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
166 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
168 /* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
172 static void
173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
181 /* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
185 static void
186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 HOST_WIDE_INT *hi)
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
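/* Illustrative sketch, not part of the original file: assuming a
   64-bit HOST_WIDE_INT (so BASE is 2^32), encode splits a two-word
   value into four 32-bit "digits" and decode reassembles it.  Kept
   out of the build with #if 0.  */
#if 0
static void
encode_decode_demo (void)
{
  HOST_WIDE_INT words[4], hi;
  unsigned HOST_WIDE_INT lo;

  encode (words, 0x123456789abcdef0ULL, (HOST_WIDE_INT) 0x0fedcba987654321LL);
  /* words[0] == 0x9abcdef0, words[1] == 0x12345678,
     words[2] == 0x87654321, words[3] == 0x0fedcba9.  */
  decode (words, &lo, &hi);
  /* lo and hi hold the original pair again.  */
}
#endif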
193 /* Force the double-word integer L1, H1 to be within the range of the
194 integer type TYPE. Stores the properly truncated and sign-extended
195 double-word integer in *LV, *HV. Returns true if the operation
196 overflows, that is, argument and result are different. */
199 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
200 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
202 unsigned HOST_WIDE_INT low0 = l1;
203 HOST_WIDE_INT high0 = h1;
204 unsigned int prec;
205 int sign_extended_type;
207 if (POINTER_TYPE_P (type)
208 || TREE_CODE (type) == OFFSET_TYPE)
209 prec = POINTER_SIZE;
210 else
211 prec = TYPE_PRECISION (type);
213 /* Size types *are* sign extended. */
214 sign_extended_type = (!TYPE_UNSIGNED (type)
215 || (TREE_CODE (type) == INTEGER_TYPE
216 && TYPE_IS_SIZETYPE (type)));
218 /* First clear all bits that are beyond the type's precision. */
219 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
221 else if (prec > HOST_BITS_PER_WIDE_INT)
222 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
223 else
225 h1 = 0;
226 if (prec < HOST_BITS_PER_WIDE_INT)
227 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
230 /* Then do sign extension if necessary. */
231 if (!sign_extended_type)
232 /* No sign extension */;
233 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 /* Correct width already. */;
235 else if (prec > HOST_BITS_PER_WIDE_INT)
237 /* Sign extend top half? */
238 if (h1 & ((unsigned HOST_WIDE_INT)1
239 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
240 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
242 else if (prec == HOST_BITS_PER_WIDE_INT)
244 if ((HOST_WIDE_INT)l1 < 0)
245 h1 = -1;
247 else
249 /* Sign extend bottom half? */
250 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
252 h1 = -1;
253 l1 |= (HOST_WIDE_INT)(-1) << prec;
257 *lv = l1;
258 *hv = h1;
260 /* If the value didn't fit, signal overflow. */
261 return l1 != low0 || h1 != high0;
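/* Illustrative sketch, not part of the original file: a standalone
   model of the truncate-and-sign-extend step for a signed type with
   0 < PREC < 64, using unsigned long long in place of HOST_WIDE_INT
   (an assumption for illustration).  Forcing 0x8000 into 16 bits
   yields -32768 and reports overflow, since argument and result
   differ.  Kept out of the build with #if 0.  */
#if 0
#include <stdbool.h>

static bool
fit_signed_demo (unsigned long long l1, long long h1, unsigned prec,
                 unsigned long long *lv, long long *hv)
{
  unsigned long long low0 = l1;
  long long high0 = h1;

  h1 = 0;
  l1 &= (1ULL << prec) - 1;            /* clear bits beyond PREC */
  if (l1 & (1ULL << (prec - 1)))       /* sign bit of the narrow value */
    {
      h1 = -1;
      l1 |= ~((1ULL << prec) - 1);     /* sign-extend the low word */
    }
  *lv = l1;
  *hv = h1;
  return l1 != low0 || h1 != high0;    /* overflow iff the value changed */
}
#endif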
264 /* We force the double-int HIGH:LOW to the range of the type TYPE by
265 sign or zero extending it.
266 OVERFLOWABLE indicates if we are interested
267 in overflow of the value, when >0 we are only interested in signed
268 overflow, for <0 we are interested in any overflow. OVERFLOWED
269 indicates whether overflow has already occurred. We force
270 T's value to be within range of T's type (by setting to 0 or 1 all
271 the bits outside the type's range). We set TREE_OVERFLOW if
272 OVERFLOWED is nonzero,
273 or OVERFLOWABLE is >0 and signed overflow occurs,
274 or OVERFLOWABLE is <0 and any overflow occurs.
276 We return a new tree node for the extended double-int. The node
277 is shared if no overflow flags are set. */
279 tree
280 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
281 HOST_WIDE_INT high, int overflowable,
282 bool overflowed)
284 int sign_extended_type;
285 bool overflow;
287 /* Size types *are* sign extended. */
288 sign_extended_type = (!TYPE_UNSIGNED (type)
289 || (TREE_CODE (type) == INTEGER_TYPE
290 && TYPE_IS_SIZETYPE (type)));
292 overflow = fit_double_type (low, high, &low, &high, type);
294 /* If we need to set overflow flags, return a new unshared node. */
295 if (overflowed || overflow)
297 if (overflowed
298 || overflowable < 0
299 || (overflowable > 0 && sign_extended_type))
301 tree t = make_node (INTEGER_CST);
302 TREE_INT_CST_LOW (t) = low;
303 TREE_INT_CST_HIGH (t) = high;
304 TREE_TYPE (t) = type;
305 TREE_OVERFLOW (t) = 1;
306 return t;
310 /* Else build a shared node. */
311 return build_int_cst_wide (type, low, high);
314 /* Add two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows according to UNSIGNED_P.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
321 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
322 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
323 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
324 bool unsigned_p)
326 unsigned HOST_WIDE_INT l;
327 HOST_WIDE_INT h;
329 l = l1 + l2;
330 h = h1 + h2 + (l < l1);
332 *lv = l;
333 *hv = h;
335 if (unsigned_p)
336 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
337 else
338 return OVERFLOW_SUM_SIGN (h1, h2, h);
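/* Illustrative sketch, not part of the original file: the low-word
   carry trick in isolation.  After L = L1 + L2 wraps modulo the word
   size, L < L1 holds exactly when the addition carried out of the
   word.  Kept out of the build with #if 0.  */
#if 0
static void
carry_demo (void)
{
  unsigned HOST_WIDE_INT l1 = ~(unsigned HOST_WIDE_INT) 0;  /* all ones */
  unsigned HOST_WIDE_INT l = l1 + 2;                        /* wraps to 1 */
  int carry = l < l1;                                       /* carry == 1 */
  (void) carry;
}
#endif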
341 /* Negate a doubleword integer with doubleword result.
342 Return nonzero if the operation overflows, assuming it's signed.
343 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
344 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
347 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
348 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
350 if (l1 == 0)
352 *lv = 0;
353 *hv = - h1;
354 return (*hv & h1) < 0;
356 else
358 *lv = -l1;
359 *hv = ~h1;
360 return 0;
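/* Illustrative sketch, not part of the original file: -X == ~X + 1,
   so when L1 is nonzero the +1 cannot carry out of the low word and
   the high word is just ~H1; when L1 == 0 the carry ripples, giving
   -H1, and (*hv & h1) < 0 flags the one unnegatable value, the
   double-word minimum.  Kept out of the build with #if 0.  */
#if 0
static void
neg_double_demo (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  /* The most negative double-word value negates to itself.  */
  int ovf = neg_double (0,
                        (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
                                         << (HOST_BITS_PER_WIDE_INT - 1)),
                        &lv, &hv);
  /* ovf == 1, lv == 0, hv unchanged in value.  */
  (void) ovf; (void) lv; (void) hv;
}
#endif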
364 /* Multiply two doubleword integers with doubleword result.
365 Return nonzero if the operation overflows according to UNSIGNED_P.
366 Each argument is given as two `HOST_WIDE_INT' pieces.
367 One argument is L1 and H1; the other, L2 and H2.
368 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
371 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
372 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
373 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
374 bool unsigned_p)
376 HOST_WIDE_INT arg1[4];
377 HOST_WIDE_INT arg2[4];
378 HOST_WIDE_INT prod[4 * 2];
379 unsigned HOST_WIDE_INT carry;
380 int i, j, k;
381 unsigned HOST_WIDE_INT toplow, neglow;
382 HOST_WIDE_INT tophigh, neghigh;
384 encode (arg1, l1, h1);
385 encode (arg2, l2, h2);
387 memset (prod, 0, sizeof prod);
389 for (i = 0; i < 4; i++)
391 carry = 0;
392 for (j = 0; j < 4; j++)
394 k = i + j;
395 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
396 carry += arg1[i] * arg2[j];
397 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
398 carry += prod[k];
399 prod[k] = LOWPART (carry);
400 carry = HIGHPART (carry);
402 prod[i + 4] = carry;
405 decode (prod, lv, hv);
406 decode (prod + 4, &toplow, &tophigh);
408 /* Unsigned overflow is immediate. */
409 if (unsigned_p)
410 return (toplow | tophigh) != 0;
412 /* Check for signed overflow by calculating the signed representation of the
413 top half of the result; it should agree with the low half's sign bit. */
414 if (h1 < 0)
416 neg_double (l2, h2, &neglow, &neghigh);
417 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
419 if (h2 < 0)
421 neg_double (l1, h1, &neglow, &neghigh);
422 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
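/* Illustrative sketch, not part of the original file: an 8-bit model
   of the signed correction step, using plain int for the wide result
   (an assumption for illustration).  Treating a negative multiplicand
   as unsigned overstates the high half of the product by the other
   operand, which is why the code above adds that operand's negation
   to the top half before testing for overflow.  Kept out of the build
   with #if 0.  */
#if 0
static void
mul_sign_fix_demo (void)
{
  signed char a = -2, b = 3;
  unsigned u = (unsigned char) a * (unsigned char) b;  /* 254 * 3 == 762 */
  int top = u >> 8;                          /* unsigned high half: 2 */
  top -= b;                                  /* a < 0: subtract b, giving -1 */
  /* -1 matches the sign extension of the true product -6.  */
  (void) top;
}
#endif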
427 /* Shift the doubleword integer in L1, H1 left by COUNT places
428 keeping only PREC bits of result.
429 Shift right if COUNT is negative.
430 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
431 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
433 void
434 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
435 HOST_WIDE_INT count, unsigned int prec,
436 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
438 unsigned HOST_WIDE_INT signmask;
440 if (count < 0)
442 rshift_double (l1, h1, -count, prec, lv, hv, arith);
443 return;
446 if (SHIFT_COUNT_TRUNCATED)
447 count %= prec;
449 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
451 /* Shifting by the host word size is undefined according to the
452 ANSI standard, so we must handle this as a special case. */
453 *hv = 0;
454 *lv = 0;
456 else if (count >= HOST_BITS_PER_WIDE_INT)
458 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
459 *lv = 0;
461 else
463 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
464 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
465 *lv = l1 << count;
468 /* Sign extend all bits that are beyond the precision. */
470 signmask = -((prec > HOST_BITS_PER_WIDE_INT
471 ? ((unsigned HOST_WIDE_INT) *hv
472 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
473 : (*lv >> (prec - 1))) & 1);
475 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
477 else if (prec >= HOST_BITS_PER_WIDE_INT)
479 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
480 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
482 else
484 *hv = signmask;
485 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
486 *lv |= signmask << prec;
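/* Illustrative sketch, not part of the original file: why the high
   bits are extracted with two shifts above.  For COUNT == 0 a single
   shift by HOST_BITS_PER_WIDE_INT would be undefined behavior;
   shifting by WORD-1 and then by 1 stays in range for every
   0 <= COUNT < HOST_BITS_PER_WIDE_INT and yields 0 when COUNT is 0.
   Kept out of the build with #if 0.  */
#if 0
static unsigned HOST_WIDE_INT
high_bits_demo (unsigned HOST_WIDE_INT l1, int count)
{
  return l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1;
}
#endif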
490 /* Shift the doubleword integer in L1, H1 right by COUNT places
491 keeping only PREC bits of result. COUNT must be positive.
492 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
493 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
495 void
496 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
497 HOST_WIDE_INT count, unsigned int prec,
498 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
499 int arith)
501 unsigned HOST_WIDE_INT signmask;
503 signmask = (arith
504 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
505 : 0);
507 if (SHIFT_COUNT_TRUNCATED)
508 count %= prec;
510 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
512 /* Shifting by the host word size is undefined according to the
513 ANSI standard, so we must handle this as a special case. */
514 *hv = 0;
515 *lv = 0;
517 else if (count >= HOST_BITS_PER_WIDE_INT)
519 *hv = 0;
520 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
522 else
524 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
525 *lv = ((l1 >> count)
526 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
529 /* Zero / sign extend all bits that are beyond the precision. */
531 if (count >= (HOST_WIDE_INT)prec)
533 *hv = signmask;
534 *lv = signmask;
536 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
538 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
540 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
541 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
543 else
545 *hv = signmask;
546 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
547 *lv |= signmask << (prec - count);
551 /* Rotate the doubleword integer in L1, H1 left by COUNT places
552 keeping only PREC bits of result.
553 Rotate right if COUNT is negative.
554 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
556 void
557 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
558 HOST_WIDE_INT count, unsigned int prec,
559 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
561 unsigned HOST_WIDE_INT s1l, s2l;
562 HOST_WIDE_INT s1h, s2h;
564 count %= prec;
565 if (count < 0)
566 count += prec;
568 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
569 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
570 *lv = s1l | s2l;
571 *hv = s1h | s2h;
574 /* Rotate the doubleword integer in L1, H1 left by COUNT places
575 keeping only PREC bits of result. COUNT must be positive.
576 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
578 void
579 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
580 HOST_WIDE_INT count, unsigned int prec,
581 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
583 unsigned HOST_WIDE_INT s1l, s2l;
584 HOST_WIDE_INT s1h, s2h;
586 count %= prec;
587 if (count < 0)
588 count += prec;
590 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
591 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
592 *lv = s1l | s2l;
593 *hv = s1h | s2h;
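/* Illustrative sketch, not part of the original file: both rotates
   above are the classic OR of two opposite logical shifts.  A
   single-word version, assuming 0 < COUNT < PREC and PREC strictly
   less than the word width so the mask shift is defined.  Kept out
   of the build with #if 0.  */
#if 0
static unsigned HOST_WIDE_INT
rotate_left_demo (unsigned HOST_WIDE_INT x, unsigned count, unsigned prec)
{
  unsigned HOST_WIDE_INT mask = ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return ((x << count) | ((x & mask) >> (prec - count))) & mask;
}
#endif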
596 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
597 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
598 CODE is a tree code for a kind of division, one of
599 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
600 or EXACT_DIV_EXPR
601 It controls how the quotient is rounded to an integer.
602 Return nonzero if the operation overflows.
603 UNS nonzero says do unsigned division. */
606 div_and_round_double (enum tree_code code, int uns,
607 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
608 HOST_WIDE_INT hnum_orig,
609 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
610 HOST_WIDE_INT hden_orig,
611 unsigned HOST_WIDE_INT *lquo,
612 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
613 HOST_WIDE_INT *hrem)
615 int quo_neg = 0;
616 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
617 HOST_WIDE_INT den[4], quo[4];
618 int i, j;
619 unsigned HOST_WIDE_INT work;
620 unsigned HOST_WIDE_INT carry = 0;
621 unsigned HOST_WIDE_INT lnum = lnum_orig;
622 HOST_WIDE_INT hnum = hnum_orig;
623 unsigned HOST_WIDE_INT lden = lden_orig;
624 HOST_WIDE_INT hden = hden_orig;
625 int overflow = 0;
627 if (hden == 0 && lden == 0)
628 overflow = 1, lden = 1;
630 /* Calculate quotient sign and convert operands to unsigned. */
631 if (!uns)
633 if (hnum < 0)
635 quo_neg = ~ quo_neg;
636 /* (minimum integer) / (-1) is the only overflow case. */
637 if (neg_double (lnum, hnum, &lnum, &hnum)
638 && ((HOST_WIDE_INT) lden & hden) == -1)
639 overflow = 1;
641 if (hden < 0)
643 quo_neg = ~ quo_neg;
644 neg_double (lden, hden, &lden, &hden);
648 if (hnum == 0 && hden == 0)
649 { /* single precision */
650 *hquo = *hrem = 0;
651 /* This unsigned division rounds toward zero. */
652 *lquo = lnum / lden;
653 goto finish_up;
656 if (hnum == 0)
657 { /* trivial case: dividend < divisor */
658 /* hden != 0 already checked. */
659 *hquo = *lquo = 0;
660 *hrem = hnum;
661 *lrem = lnum;
662 goto finish_up;
665 memset (quo, 0, sizeof quo);
667 memset (num, 0, sizeof num); /* to zero the extra scaling element */
668 memset (den, 0, sizeof den);
670 encode (num, lnum, hnum);
671 encode (den, lden, hden);
673 /* Special code for when the divisor < BASE. */
674 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
676 /* hnum != 0 already checked. */
677 for (i = 4 - 1; i >= 0; i--)
679 work = num[i] + carry * BASE;
680 quo[i] = work / lden;
681 carry = work % lden;
684 else
686 /* Full double precision division,
687 with thanks to Don Knuth's "Seminumerical Algorithms". */
688 int num_hi_sig, den_hi_sig;
689 unsigned HOST_WIDE_INT quo_est, scale;
691 /* Find the highest nonzero divisor digit. */
692 for (i = 4 - 1;; i--)
693 if (den[i] != 0)
695 den_hi_sig = i;
696 break;
699 /* Ensure that the first digit of the divisor is at least BASE/2.
700 This is required by the quotient digit estimation algorithm. */
702 scale = BASE / (den[den_hi_sig] + 1);
703 if (scale > 1)
704 { /* scale divisor and dividend */
705 carry = 0;
706 for (i = 0; i <= 4 - 1; i++)
708 work = (num[i] * scale) + carry;
709 num[i] = LOWPART (work);
710 carry = HIGHPART (work);
713 num[4] = carry;
714 carry = 0;
715 for (i = 0; i <= 4 - 1; i++)
717 work = (den[i] * scale) + carry;
718 den[i] = LOWPART (work);
719 carry = HIGHPART (work);
720 if (den[i] != 0) den_hi_sig = i;
724 num_hi_sig = 4;
726 /* Main loop */
727 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
729 /* Guess the next quotient digit, quo_est, by dividing the first
730 two remaining dividend digits by the high order quotient digit.
731 quo_est is never low and is at most 2 high. */
732 unsigned HOST_WIDE_INT tmp;
734 num_hi_sig = i + den_hi_sig + 1;
735 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
736 if (num[num_hi_sig] != den[den_hi_sig])
737 quo_est = work / den[den_hi_sig];
738 else
739 quo_est = BASE - 1;
741 /* Refine quo_est so it's usually correct, and at most one high. */
742 tmp = work - quo_est * den[den_hi_sig];
743 if (tmp < BASE
744 && (den[den_hi_sig - 1] * quo_est
745 > (tmp * BASE + num[num_hi_sig - 2])))
746 quo_est--;
748 /* Try QUO_EST as the quotient digit, by multiplying the
749 divisor by QUO_EST and subtracting from the remaining dividend.
750 Keep in mind that QUO_EST is the I - 1st digit. */
752 carry = 0;
753 for (j = 0; j <= den_hi_sig; j++)
755 work = quo_est * den[j] + carry;
756 carry = HIGHPART (work);
757 work = num[i + j] - LOWPART (work);
758 num[i + j] = LOWPART (work);
759 carry += HIGHPART (work) != 0;
762 /* If quo_est was high by one, then num[i] went negative and
763 we need to correct things. */
764 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
766 quo_est--;
767 carry = 0; /* add divisor back in */
768 for (j = 0; j <= den_hi_sig; j++)
770 work = num[i + j] + den[j] + carry;
771 carry = HIGHPART (work);
772 num[i + j] = LOWPART (work);
775 num [num_hi_sig] += carry;
778 /* Store the quotient digit. */
779 quo[i] = quo_est;
783 decode (quo, lquo, hquo);
785 finish_up:
786 /* If result is negative, make it so. */
787 if (quo_neg)
788 neg_double (*lquo, *hquo, lquo, hquo);
790 /* Compute trial remainder: rem = num - (quo * den) */
791 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
792 neg_double (*lrem, *hrem, lrem, hrem);
793 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
795 switch (code)
797 case TRUNC_DIV_EXPR:
798 case TRUNC_MOD_EXPR: /* round toward zero */
799 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
800 return overflow;
802 case FLOOR_DIV_EXPR:
803 case FLOOR_MOD_EXPR: /* round toward negative infinity */
804 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
806 /* quo = quo - 1; */
807 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
808 lquo, hquo);
810 else
811 return overflow;
812 break;
814 case CEIL_DIV_EXPR:
815 case CEIL_MOD_EXPR: /* round toward positive infinity */
816 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
819 lquo, hquo);
821 else
822 return overflow;
823 break;
825 case ROUND_DIV_EXPR:
826 case ROUND_MOD_EXPR: /* round to closest integer */
828 unsigned HOST_WIDE_INT labs_rem = *lrem;
829 HOST_WIDE_INT habs_rem = *hrem;
830 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
831 HOST_WIDE_INT habs_den = hden, htwice;
833 /* Get absolute values. */
834 if (*hrem < 0)
835 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
836 if (hden < 0)
837 neg_double (lden, hden, &labs_den, &habs_den);
839 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
840 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
841 labs_rem, habs_rem, &ltwice, &htwice);
843 if (((unsigned HOST_WIDE_INT) habs_den
844 < (unsigned HOST_WIDE_INT) htwice)
845 || (((unsigned HOST_WIDE_INT) habs_den
846 == (unsigned HOST_WIDE_INT) htwice)
847 && (labs_den <= ltwice)))
849 if (*hquo < 0)
850 /* quo = quo - 1; */
851 add_double (*lquo, *hquo,
852 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
853 else
854 /* quo = quo + 1; */
855 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
856 lquo, hquo);
858 else
859 return overflow;
861 break;
863 default:
864 gcc_unreachable ();
867 /* Compute true remainder: rem = num - (quo * den) */
868 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
869 neg_double (*lrem, *hrem, lrem, hrem);
870 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
871 return overflow;
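/* Illustrative sketch, not part of the original file: the rounding
   modes at a glance for -7 / 2 (quotient, remainder):
     TRUNC: (-3, -1)   FLOOR: (-4, 1)   CEIL: (-3, -1)   ROUND: (-4, 1)
   and num == quo * den + rem holds in every mode.  Kept out of the
   build with #if 0.  */
#if 0
static void
div_round_demo (void)
{
  unsigned HOST_WIDE_INT lq, lr;
  HOST_WIDE_INT hq, hr;
  div_and_round_double (FLOOR_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
                        2, 0, &lq, &hq, &lr, &hr);
  /* lq:hq == -4 and lr:hr == 1; TRUNC_DIV_EXPR would give -3 and -1.  */
  (void) lq; (void) hq; (void) lr; (void) hr;
}
#endif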
874 /* If ARG2 divides ARG1 with zero remainder, carries out the division
875 of type CODE and returns the quotient.
876 Otherwise returns NULL_TREE. */
878 tree
879 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
881 unsigned HOST_WIDE_INT int1l, int2l;
882 HOST_WIDE_INT int1h, int2h;
883 unsigned HOST_WIDE_INT quol, reml;
884 HOST_WIDE_INT quoh, remh;
885 tree type = TREE_TYPE (arg1);
886 int uns = TYPE_UNSIGNED (type);
888 int1l = TREE_INT_CST_LOW (arg1);
889 int1h = TREE_INT_CST_HIGH (arg1);
890 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
891 &obj[some_exotic_number]. */
892 if (POINTER_TYPE_P (type))
894 uns = false;
895 type = signed_type_for (type);
896 fit_double_type (int1l, int1h, &int1l, &int1h,
897 type);
899 else
900 fit_double_type (int1l, int1h, &int1l, &int1h, type);
901 int2l = TREE_INT_CST_LOW (arg2);
902 int2h = TREE_INT_CST_HIGH (arg2);
904 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
905 &quol, &quoh, &reml, &remh);
906 if (remh != 0 || reml != 0)
907 return NULL_TREE;
909 return build_int_cst_wide (type, quol, quoh);
912 /* This is nonzero if we should defer warnings about undefined
913 overflow. This facility exists because these warnings are a
914 special case. The code to estimate loop iterations does not want
915 to issue any warnings, since it works with expressions which do not
916 occur in user code. Various bits of cleanup code call fold(), but
917 only use the result if it has certain characteristics (e.g., is a
918 constant); that code only wants to issue a warning if the result is
919 used. */
921 static int fold_deferring_overflow_warnings;
923 /* If a warning about undefined overflow is deferred, this is the
924 warning. Note that this may cause us to turn two warnings into
925 one, but that is fine since it is sufficient to only give one
926 warning per expression. */
928 static const char* fold_deferred_overflow_warning;
930 /* If a warning about undefined overflow is deferred, this is the
931 level at which the warning should be emitted. */
933 static enum warn_strict_overflow_code fold_deferred_overflow_code;
935 /* Start deferring overflow warnings. We could use a stack here to
936 permit nested calls, but at present it is not necessary. */
938 void
939 fold_defer_overflow_warnings (void)
941 ++fold_deferring_overflow_warnings;
944 /* Stop deferring overflow warnings. If there is a pending warning,
945 and ISSUE is true, then issue the warning if appropriate. STMT is
946 the statement with which the warning should be associated (used for
947 location information); STMT may be NULL. CODE is the level of the
948 warning--a warn_strict_overflow_code value. This function will use
949 the smaller of CODE and the deferred code when deciding whether to
950 issue the warning. CODE may be zero to mean to always use the
951 deferred code. */
953 void
954 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
956 const char *warnmsg;
957 location_t locus;
959 gcc_assert (fold_deferring_overflow_warnings > 0);
960 --fold_deferring_overflow_warnings;
961 if (fold_deferring_overflow_warnings > 0)
963 if (fold_deferred_overflow_warning != NULL
964 && code != 0
965 && code < (int) fold_deferred_overflow_code)
966 fold_deferred_overflow_code = code;
967 return;
970 warnmsg = fold_deferred_overflow_warning;
971 fold_deferred_overflow_warning = NULL;
973 if (!issue || warnmsg == NULL)
974 return;
976 if (gimple_no_warning_p (stmt))
977 return;
979 /* Use the smallest code level when deciding to issue the
980 warning. */
981 if (code == 0 || code > (int) fold_deferred_overflow_code)
982 code = fold_deferred_overflow_code;
984 if (!issue_strict_overflow_warning (code))
985 return;
987 if (stmt == NULL)
988 locus = input_location;
989 else
990 locus = gimple_location (stmt);
991 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
994 /* Stop deferring overflow warnings, ignoring any deferred
995 warnings. */
997 void
998 fold_undefer_and_ignore_overflow_warnings (void)
1000 fold_undefer_overflow_warnings (false, NULL, 0);
1003 /* Whether we are deferring overflow warnings. */
1005 bool
1006 fold_deferring_overflow_warnings_p (void)
1008 return fold_deferring_overflow_warnings > 0;
1011 /* This is called when we fold something based on the fact that signed
1012 overflow is undefined. */
1014 static void
1015 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1017 if (fold_deferring_overflow_warnings > 0)
1019 if (fold_deferred_overflow_warning == NULL
1020 || wc < fold_deferred_overflow_code)
1022 fold_deferred_overflow_warning = gmsgid;
1023 fold_deferred_overflow_code = wc;
1026 else if (issue_strict_overflow_warning (wc))
1027 warning (OPT_Wstrict_overflow, gmsgid);
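/* Illustrative sketch, not part of the original file: the deferral
   protocol as a caller sees it.  The operands A and B, STMT and
   RESULT_USED are hypothetical.  Kept out of the build with #if 0.  */
#if 0
static tree
deferred_fold_demo (tree a, tree b, gimple stmt, bool result_used)
{
  tree res;

  fold_defer_overflow_warnings ();
  /* Any fold below that relies on signed overflow being undefined
     records its warning instead of emitting it.  */
  res = fold_build2 (PLUS_EXPR, TREE_TYPE (a), a, b);
  /* Emit the recorded warning only if the result is actually used;
     0 means use the deferred warning's own level.  */
  fold_undefer_overflow_warnings (result_used, stmt, 0);
  return res;
}
#endif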
1030 /* Return true if the built-in mathematical function specified by CODE
1031 is odd, i.e. -f(x) == f(-x). */
1033 static bool
1034 negate_mathfn_p (enum built_in_function code)
1036 switch (code)
1038 CASE_FLT_FN (BUILT_IN_ASIN):
1039 CASE_FLT_FN (BUILT_IN_ASINH):
1040 CASE_FLT_FN (BUILT_IN_ATAN):
1041 CASE_FLT_FN (BUILT_IN_ATANH):
1042 CASE_FLT_FN (BUILT_IN_CASIN):
1043 CASE_FLT_FN (BUILT_IN_CASINH):
1044 CASE_FLT_FN (BUILT_IN_CATAN):
1045 CASE_FLT_FN (BUILT_IN_CATANH):
1046 CASE_FLT_FN (BUILT_IN_CBRT):
1047 CASE_FLT_FN (BUILT_IN_CPROJ):
1048 CASE_FLT_FN (BUILT_IN_CSIN):
1049 CASE_FLT_FN (BUILT_IN_CSINH):
1050 CASE_FLT_FN (BUILT_IN_CTAN):
1051 CASE_FLT_FN (BUILT_IN_CTANH):
1052 CASE_FLT_FN (BUILT_IN_ERF):
1053 CASE_FLT_FN (BUILT_IN_LLROUND):
1054 CASE_FLT_FN (BUILT_IN_LROUND):
1055 CASE_FLT_FN (BUILT_IN_ROUND):
1056 CASE_FLT_FN (BUILT_IN_SIN):
1057 CASE_FLT_FN (BUILT_IN_SINH):
1058 CASE_FLT_FN (BUILT_IN_TAN):
1059 CASE_FLT_FN (BUILT_IN_TANH):
1060 CASE_FLT_FN (BUILT_IN_TRUNC):
1061 return true;
1063 CASE_FLT_FN (BUILT_IN_LLRINT):
1064 CASE_FLT_FN (BUILT_IN_LRINT):
1065 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1066 CASE_FLT_FN (BUILT_IN_RINT):
1067 return !flag_rounding_math;
1069 default:
1070 break;
1072 return false;
1075 /* Check whether we may negate an integer constant T without causing
1076 overflow. */
1078 bool
1079 may_negate_without_overflow_p (const_tree t)
1081 unsigned HOST_WIDE_INT val;
1082 unsigned int prec;
1083 tree type;
1085 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1087 type = TREE_TYPE (t);
1088 if (TYPE_UNSIGNED (type))
1089 return false;
1091 prec = TYPE_PRECISION (type);
1092 if (prec > HOST_BITS_PER_WIDE_INT)
1094 if (TREE_INT_CST_LOW (t) != 0)
1095 return true;
1096 prec -= HOST_BITS_PER_WIDE_INT;
1097 val = TREE_INT_CST_HIGH (t);
1099 else
1100 val = TREE_INT_CST_LOW (t);
1101 if (prec < HOST_BITS_PER_WIDE_INT)
1102 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1103 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
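/* Illustrative sketch, not part of the original file: for 32-bit int
   the test above reduces to "VAL is not INT_MIN", since the type
   minimum is the only signed value whose negation overflows.  Kept
   out of the build with #if 0.  */
#if 0
static int
may_negate_int32_demo (int val)
{
  return (unsigned) val != (unsigned) 1 << 31;
}
#endif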
1106 /* Determine whether an expression T can be cheaply negated using
1107 the function negate_expr without introducing undefined overflow. */
1109 static bool
1110 negate_expr_p (tree t)
1112 tree type;
1114 if (t == 0)
1115 return false;
1117 type = TREE_TYPE (t);
1119 STRIP_SIGN_NOPS (t);
1120 switch (TREE_CODE (t))
1122 case INTEGER_CST:
1123 if (TYPE_OVERFLOW_WRAPS (type))
1124 return true;
1126 /* Check that -CST will not overflow type. */
1127 return may_negate_without_overflow_p (t);
1128 case BIT_NOT_EXPR:
1129 return (INTEGRAL_TYPE_P (type)
1130 && TYPE_OVERFLOW_WRAPS (type));
1132 case FIXED_CST:
1133 case REAL_CST:
1134 case NEGATE_EXPR:
1135 return true;
1137 case COMPLEX_CST:
1138 return negate_expr_p (TREE_REALPART (t))
1139 && negate_expr_p (TREE_IMAGPART (t));
1141 case COMPLEX_EXPR:
1142 return negate_expr_p (TREE_OPERAND (t, 0))
1143 && negate_expr_p (TREE_OPERAND (t, 1));
1145 case CONJ_EXPR:
1146 return negate_expr_p (TREE_OPERAND (t, 0));
1148 case PLUS_EXPR:
1149 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1150 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1151 return false;
1152 /* -(A + B) -> (-B) - A. */
1153 if (negate_expr_p (TREE_OPERAND (t, 1))
1154 && reorder_operands_p (TREE_OPERAND (t, 0),
1155 TREE_OPERAND (t, 1)))
1156 return true;
1157 /* -(A + B) -> (-A) - B. */
1158 return negate_expr_p (TREE_OPERAND (t, 0));
1160 case MINUS_EXPR:
1161 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1162 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1163 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1164 && reorder_operands_p (TREE_OPERAND (t, 0),
1165 TREE_OPERAND (t, 1));
1167 case MULT_EXPR:
1168 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1169 break;
1171 /* Fall through. */
1173 case RDIV_EXPR:
1174 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1175 return negate_expr_p (TREE_OPERAND (t, 1))
1176 || negate_expr_p (TREE_OPERAND (t, 0));
1177 break;
1179 case TRUNC_DIV_EXPR:
1180 case ROUND_DIV_EXPR:
1181 case FLOOR_DIV_EXPR:
1182 case CEIL_DIV_EXPR:
1183 case EXACT_DIV_EXPR:
1184 /* In general we can't negate A / B, because if A is INT_MIN and
1185 B is 1, we may turn this into INT_MIN / -1 which is undefined
1186 and actually traps on some architectures. But if overflow is
1187 undefined, we can negate, because - (INT_MIN / 1) is an
1188 overflow. */
1189 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1190 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1191 break;
1192 return negate_expr_p (TREE_OPERAND (t, 1))
1193 || negate_expr_p (TREE_OPERAND (t, 0));
1195 case NOP_EXPR:
1196 /* Negate -((double)float) as (double)(-float). */
1197 if (TREE_CODE (type) == REAL_TYPE)
1199 tree tem = strip_float_extensions (t);
1200 if (tem != t)
1201 return negate_expr_p (tem);
1203 break;
1205 case CALL_EXPR:
1206 /* Negate -f(x) as f(-x). */
1207 if (negate_mathfn_p (builtin_mathfn_code (t)))
1208 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1209 break;
1211 case RSHIFT_EXPR:
1212 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1213 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1215 tree op1 = TREE_OPERAND (t, 1);
1216 if (TREE_INT_CST_HIGH (op1) == 0
1217 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1218 == TREE_INT_CST_LOW (op1))
1219 return true;
1221 break;
1223 default:
1224 break;
1226 return false;
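/* Illustrative sketch, not part of the original file: the
   RSHIFT_EXPR case above in plain C for 32-bit int.  The arithmetic
   shift x >> 31 yields 0 or -1, so its negation is 0 or 1, which is
   exactly the logical shift of the sign bit.  Kept out of the build
   with #if 0.  */
#if 0
static unsigned
negate_sign_shift_demo (int x)
{
  return (unsigned) x >> 31;   /* == (unsigned) -(x >> 31) */
}
#endif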
1229 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
1230 simplification is possible.
1231 If negate_expr_p would return true for T, NULL_TREE will never be
1232 returned. */
1234 static tree
1235 fold_negate_expr (tree t)
1237 tree type = TREE_TYPE (t);
1238 tree tem;
1240 switch (TREE_CODE (t))
1242 /* Convert - (~A) to A + 1. */
1243 case BIT_NOT_EXPR:
1244 if (INTEGRAL_TYPE_P (type))
1245 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1246 build_int_cst (type, 1));
1247 break;
1249 case INTEGER_CST:
1250 tem = fold_negate_const (t, type);
1251 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1252 || !TYPE_OVERFLOW_TRAPS (type))
1253 return tem;
1254 break;
1256 case REAL_CST:
1257 tem = fold_negate_const (t, type);
1258 /* Two's complement FP formats, such as c4x, may overflow. */
1259 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1260 return tem;
1261 break;
1263 case FIXED_CST:
1264 tem = fold_negate_const (t, type);
1265 return tem;
1267 case COMPLEX_CST:
1269 tree rpart = negate_expr (TREE_REALPART (t));
1270 tree ipart = negate_expr (TREE_IMAGPART (t));
1272 if ((TREE_CODE (rpart) == REAL_CST
1273 && TREE_CODE (ipart) == REAL_CST)
1274 || (TREE_CODE (rpart) == INTEGER_CST
1275 && TREE_CODE (ipart) == INTEGER_CST))
1276 return build_complex (type, rpart, ipart);
1278 break;
1280 case COMPLEX_EXPR:
1281 if (negate_expr_p (t))
1282 return fold_build2 (COMPLEX_EXPR, type,
1283 fold_negate_expr (TREE_OPERAND (t, 0)),
1284 fold_negate_expr (TREE_OPERAND (t, 1)));
1285 break;
1287 case CONJ_EXPR:
1288 if (negate_expr_p (t))
1289 return fold_build1 (CONJ_EXPR, type,
1290 fold_negate_expr (TREE_OPERAND (t, 0)));
1291 break;
1293 case NEGATE_EXPR:
1294 return TREE_OPERAND (t, 0);
1296 case PLUS_EXPR:
1297 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1298 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1300 /* -(A + B) -> (-B) - A. */
1301 if (negate_expr_p (TREE_OPERAND (t, 1))
1302 && reorder_operands_p (TREE_OPERAND (t, 0),
1303 TREE_OPERAND (t, 1)))
1305 tem = negate_expr (TREE_OPERAND (t, 1));
1306 return fold_build2 (MINUS_EXPR, type,
1307 tem, TREE_OPERAND (t, 0));
1310 /* -(A + B) -> (-A) - B. */
1311 if (negate_expr_p (TREE_OPERAND (t, 0)))
1313 tem = negate_expr (TREE_OPERAND (t, 0));
1314 return fold_build2 (MINUS_EXPR, type,
1315 tem, TREE_OPERAND (t, 1));
1318 break;
1320 case MINUS_EXPR:
1321 /* - (A - B) -> B - A */
1322 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1323 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1324 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1325 return fold_build2 (MINUS_EXPR, type,
1326 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1327 break;
1329 case MULT_EXPR:
1330 if (TYPE_UNSIGNED (type))
1331 break;
1333 /* Fall through. */
1335 case RDIV_EXPR:
1336 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1338 tem = TREE_OPERAND (t, 1);
1339 if (negate_expr_p (tem))
1340 return fold_build2 (TREE_CODE (t), type,
1341 TREE_OPERAND (t, 0), negate_expr (tem));
1342 tem = TREE_OPERAND (t, 0);
1343 if (negate_expr_p (tem))
1344 return fold_build2 (TREE_CODE (t), type,
1345 negate_expr (tem), TREE_OPERAND (t, 1));
1347 break;
1349 case TRUNC_DIV_EXPR:
1350 case ROUND_DIV_EXPR:
1351 case FLOOR_DIV_EXPR:
1352 case CEIL_DIV_EXPR:
1353 case EXACT_DIV_EXPR:
1354 /* In general we can't negate A / B, because if A is INT_MIN and
1355 B is 1, we may turn this into INT_MIN / -1 which is undefined
1356 and actually traps on some architectures. But if overflow is
1357 undefined, we can negate, because - (INT_MIN / 1) is an
1358 overflow. */
1359 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1361 const char * const warnmsg = G_("assuming signed overflow does not "
1362 "occur when negating a division");
1363 tem = TREE_OPERAND (t, 1);
1364 if (negate_expr_p (tem))
1366 if (INTEGRAL_TYPE_P (type)
1367 && (TREE_CODE (tem) != INTEGER_CST
1368 || integer_onep (tem)))
1369 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1370 return fold_build2 (TREE_CODE (t), type,
1371 TREE_OPERAND (t, 0), negate_expr (tem));
1373 tem = TREE_OPERAND (t, 0);
1374 if (negate_expr_p (tem))
1376 if (INTEGRAL_TYPE_P (type)
1377 && (TREE_CODE (tem) != INTEGER_CST
1378 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1379 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1380 return fold_build2 (TREE_CODE (t), type,
1381 negate_expr (tem), TREE_OPERAND (t, 1));
1384 break;
1386 case NOP_EXPR:
1387 /* Convert -((double)float) into (double)(-float). */
1388 if (TREE_CODE (type) == REAL_TYPE)
1390 tem = strip_float_extensions (t);
1391 if (tem != t && negate_expr_p (tem))
1392 return fold_convert (type, negate_expr (tem));
1394 break;
1396 case CALL_EXPR:
1397 /* Negate -f(x) as f(-x). */
1398 if (negate_mathfn_p (builtin_mathfn_code (t))
1399 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1401 tree fndecl, arg;
1403 fndecl = get_callee_fndecl (t);
1404 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1405 return build_call_expr (fndecl, 1, arg);
1407 break;
1409 case RSHIFT_EXPR:
1410 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1411 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1413 tree op1 = TREE_OPERAND (t, 1);
1414 if (TREE_INT_CST_HIGH (op1) == 0
1415 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1416 == TREE_INT_CST_LOW (op1))
1418 tree ntype = TYPE_UNSIGNED (type)
1419 ? signed_type_for (type)
1420 : unsigned_type_for (type);
1421 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1422 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1423 return fold_convert (type, temp);
1426 break;
1428 default:
1429 break;
1432 return NULL_TREE;
1435 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1436 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1437 return NULL_TREE. */
1439 static tree
1440 negate_expr (tree t)
1442 tree type, tem;
1444 if (t == NULL_TREE)
1445 return NULL_TREE;
1447 type = TREE_TYPE (t);
1448 STRIP_SIGN_NOPS (t);
1450 tem = fold_negate_expr (t);
1451 if (!tem)
1452 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1453 return fold_convert (type, tem);
1456 /* Split a tree IN into constant, literal, and variable parts that could be
1457 combined with CODE to make IN. "constant" means an expression with
1458 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1459 commutative arithmetic operation. Store the constant part into *CONP,
1460 the literal in *LITP and return the variable part. If a part isn't
1461 present, set it to null. If the tree does not decompose in this way,
1462 return the entire tree as the variable part and the other parts as null.
1464 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1465 case, we negate an operand that was subtracted. Except if it is a
1466 literal for which we use *MINUS_LITP instead.
1468 If NEGATE_P is true, we are negating all of IN, again except a literal
1469 for which we use *MINUS_LITP instead.
1471 If IN is itself a literal or constant, return it as appropriate.
1473 Note that we do not guarantee that any of the three values will be the
1474 same type as IN, but they will have the same signedness and mode. */
1476 static tree
1477 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1478 tree *minus_litp, int negate_p)
1480 tree var = 0;
1482 *conp = 0;
1483 *litp = 0;
1484 *minus_litp = 0;
1486 /* Strip any conversions that don't change the machine mode or signedness. */
1487 STRIP_SIGN_NOPS (in);
1489 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1490 || TREE_CODE (in) == FIXED_CST)
1491 *litp = in;
1492 else if (TREE_CODE (in) == code
1493 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1494 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1495 /* We can associate addition and subtraction together (even
1496 though the C standard doesn't say so) for integers because
1497 the value is not affected. For reals, the value might be
1498 affected, so we can't. */
1499 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1500 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1502 tree op0 = TREE_OPERAND (in, 0);
1503 tree op1 = TREE_OPERAND (in, 1);
1504 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1505 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1507 /* First see if either of the operands is a literal, then a constant. */
1508 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1509 || TREE_CODE (op0) == FIXED_CST)
1510 *litp = op0, op0 = 0;
1511 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1512 || TREE_CODE (op1) == FIXED_CST)
1513 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1515 if (op0 != 0 && TREE_CONSTANT (op0))
1516 *conp = op0, op0 = 0;
1517 else if (op1 != 0 && TREE_CONSTANT (op1))
1518 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1520 /* If we haven't dealt with either operand, this is not a case we can
1521 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1522 if (op0 != 0 && op1 != 0)
1523 var = in;
1524 else if (op0 != 0)
1525 var = op0;
1526 else
1527 var = op1, neg_var_p = neg1_p;
1529 /* Now do any needed negations. */
1530 if (neg_litp_p)
1531 *minus_litp = *litp, *litp = 0;
1532 if (neg_conp_p)
1533 *conp = negate_expr (*conp);
1534 if (neg_var_p)
1535 var = negate_expr (var);
1537 else if (TREE_CONSTANT (in))
1538 *conp = in;
1539 else
1540 var = in;
1542 if (negate_p)
1544 if (*litp)
1545 *minus_litp = *litp, *litp = 0;
1546 else if (*minus_litp)
1547 *litp = *minus_litp, *minus_litp = 0;
1548 *conp = negate_expr (*conp);
1549 var = negate_expr (var);
1552 return var;
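/* Illustrative sketch, not part of the original file: what split_tree
   produces for IN representing "a - 5" under PLUS_EXPR ("a" and the
   tree IN are hypothetical).  Kept out of the build with #if 0.  */
#if 0
static tree
split_tree_demo (tree in)
{
  tree con, lit, minus_lit;
  tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* For "a - 5": var == a, con == NULL, lit == NULL and
     minus_lit == 5, the subtracted literal being reported through
     *MINUS_LITP as documented above.  */
  return var;
}
#endif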
1555 /* Re-associate trees split by the above function. T1 and T2 are either
1556 expressions to associate or null. Return the new expression, if any. If
1557 we build an operation, do it in TYPE and with CODE. */
1559 static tree
1560 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1562 if (t1 == 0)
1563 return t2;
1564 else if (t2 == 0)
1565 return t1;
1567 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1568 try to fold this since we will have infinite recursion. But do
1569 deal with any NEGATE_EXPRs. */
1570 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1571 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1573 if (code == PLUS_EXPR)
1575 if (TREE_CODE (t1) == NEGATE_EXPR)
1576 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1577 fold_convert (type, TREE_OPERAND (t1, 0)));
1578 else if (TREE_CODE (t2) == NEGATE_EXPR)
1579 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1580 fold_convert (type, TREE_OPERAND (t2, 0)));
1581 else if (integer_zerop (t2))
1582 return fold_convert (type, t1);
1584 else if (code == MINUS_EXPR)
1586 if (integer_zerop (t2))
1587 return fold_convert (type, t1);
1590 return build2 (code, type, fold_convert (type, t1),
1591 fold_convert (type, t2));
1594 return fold_build2 (code, type, fold_convert (type, t1),
1595 fold_convert (type, t2));
1598 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1599 for use in int_const_binop, size_binop and size_diffop. */
1601 static bool
1602 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1604 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1605 return false;
1606 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1607 return false;
1609 switch (code)
1611 case LSHIFT_EXPR:
1612 case RSHIFT_EXPR:
1613 case LROTATE_EXPR:
1614 case RROTATE_EXPR:
1615 return true;
1617 default:
1618 break;
1621 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1622 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1623 && TYPE_MODE (type1) == TYPE_MODE (type2);
1627 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1628 to produce a new constant. Return NULL_TREE if we don't know how
1629 to evaluate CODE at compile-time.
1631 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1633 tree
1634 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1636 unsigned HOST_WIDE_INT int1l, int2l;
1637 HOST_WIDE_INT int1h, int2h;
1638 unsigned HOST_WIDE_INT low;
1639 HOST_WIDE_INT hi;
1640 unsigned HOST_WIDE_INT garbagel;
1641 HOST_WIDE_INT garbageh;
1642 tree t;
1643 tree type = TREE_TYPE (arg1);
1644 int uns = TYPE_UNSIGNED (type);
1645 int is_sizetype
1646 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1647 int overflow = 0;
1649 int1l = TREE_INT_CST_LOW (arg1);
1650 int1h = TREE_INT_CST_HIGH (arg1);
1651 int2l = TREE_INT_CST_LOW (arg2);
1652 int2h = TREE_INT_CST_HIGH (arg2);
1654 switch (code)
1656 case BIT_IOR_EXPR:
1657 low = int1l | int2l, hi = int1h | int2h;
1658 break;
1660 case BIT_XOR_EXPR:
1661 low = int1l ^ int2l, hi = int1h ^ int2h;
1662 break;
1664 case BIT_AND_EXPR:
1665 low = int1l & int2l, hi = int1h & int2h;
1666 break;
1668 case RSHIFT_EXPR:
1669 int2l = -int2l;
1670 case LSHIFT_EXPR:
1671 /* It's unclear from the C standard whether shifts can overflow.
1672 The following code ignores overflow; perhaps a C standard
1673 interpretation ruling is needed. */
1674 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1675 &low, &hi, !uns);
1676 break;
1678 case RROTATE_EXPR:
1679 int2l = - int2l;
1680 case LROTATE_EXPR:
1681 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1682 &low, &hi);
1683 break;
1685 case PLUS_EXPR:
1686 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1687 break;
1689 case MINUS_EXPR:
1690 neg_double (int2l, int2h, &low, &hi);
1691 add_double (int1l, int1h, low, hi, &low, &hi);
1692 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1693 break;
1695 case MULT_EXPR:
1696 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1697 break;
1699 case TRUNC_DIV_EXPR:
1700 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1701 case EXACT_DIV_EXPR:
1702 /* This is a shortcut for a common special case. */
1703 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1704 && !TREE_OVERFLOW (arg1)
1705 && !TREE_OVERFLOW (arg2)
1706 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1708 if (code == CEIL_DIV_EXPR)
1709 int1l += int2l - 1;
1711 low = int1l / int2l, hi = 0;
1712 break;
1715 /* ... fall through ... */
1717 case ROUND_DIV_EXPR:
1718 if (int2h == 0 && int2l == 0)
1719 return NULL_TREE;
1720 if (int2h == 0 && int2l == 1)
1722 low = int1l, hi = int1h;
1723 break;
1725 if (int1l == int2l && int1h == int2h
1726 && ! (int1l == 0 && int1h == 0))
1728 low = 1, hi = 0;
1729 break;
1731 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1732 &low, &hi, &garbagel, &garbageh);
1733 break;
1735 case TRUNC_MOD_EXPR:
1736 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1737 /* This is a shortcut for a common special case. */
1738 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1739 && !TREE_OVERFLOW (arg1)
1740 && !TREE_OVERFLOW (arg2)
1741 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1743 if (code == CEIL_MOD_EXPR)
1744 int1l += int2l - 1;
1745 low = int1l % int2l, hi = 0;
1746 break;
1749 /* ... fall through ... */
1751 case ROUND_MOD_EXPR:
1752 if (int2h == 0 && int2l == 0)
1753 return NULL_TREE;
1754 overflow = div_and_round_double (code, uns,
1755 int1l, int1h, int2l, int2h,
1756 &garbagel, &garbageh, &low, &hi);
1757 break;
1759 case MIN_EXPR:
1760 case MAX_EXPR:
1761 if (uns)
1762 low = (((unsigned HOST_WIDE_INT) int1h
1763 < (unsigned HOST_WIDE_INT) int2h)
1764 || (((unsigned HOST_WIDE_INT) int1h
1765 == (unsigned HOST_WIDE_INT) int2h)
1766 && int1l < int2l));
1767 else
1768 low = (int1h < int2h
1769 || (int1h == int2h && int1l < int2l));
1771 if (low == (code == MIN_EXPR))
1772 low = int1l, hi = int1h;
1773 else
1774 low = int2l, hi = int2h;
1775 break;
1777 default:
1778 return NULL_TREE;
1781 if (notrunc)
1783 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1785 /* Propagate overflow flags ourselves. */
1786 if (((!uns || is_sizetype) && overflow)
1787 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1789 t = copy_node (t);
1790 TREE_OVERFLOW (t) = 1;
1793 else
1794 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1795 ((!uns || is_sizetype) && overflow)
1796 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1798 return t;
1801 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1802 constant. We assume ARG1 and ARG2 have the same data type, or at least
1803 are the same kind of constant and the same machine mode. Return zero if
1804 combining the constants is not allowed in the current operating mode.
1806 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1808 static tree
1809 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1811 /* Sanity check for the recursive cases. */
1812 if (!arg1 || !arg2)
1813 return NULL_TREE;
1815 STRIP_NOPS (arg1);
1816 STRIP_NOPS (arg2);
1818 if (TREE_CODE (arg1) == INTEGER_CST)
1819 return int_const_binop (code, arg1, arg2, notrunc);
1821 if (TREE_CODE (arg1) == REAL_CST)
1823 enum machine_mode mode;
1824 REAL_VALUE_TYPE d1;
1825 REAL_VALUE_TYPE d2;
1826 REAL_VALUE_TYPE value;
1827 REAL_VALUE_TYPE result;
1828 bool inexact;
1829 tree t, type;
1831 /* The following codes are handled by real_arithmetic. */
1832 switch (code)
1834 case PLUS_EXPR:
1835 case MINUS_EXPR:
1836 case MULT_EXPR:
1837 case RDIV_EXPR:
1838 case MIN_EXPR:
1839 case MAX_EXPR:
1840 break;
1842 default:
1843 return NULL_TREE;
1846 d1 = TREE_REAL_CST (arg1);
1847 d2 = TREE_REAL_CST (arg2);
1849 type = TREE_TYPE (arg1);
1850 mode = TYPE_MODE (type);
1852 /* Don't perform operation if we honor signaling NaNs and
1853 either operand is a NaN. */
1854 if (HONOR_SNANS (mode)
1855 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1856 return NULL_TREE;
1858 /* Don't perform operation if it would raise a division
1859 by zero exception. */
1860 if (code == RDIV_EXPR
1861 && REAL_VALUES_EQUAL (d2, dconst0)
1862 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1863 return NULL_TREE;
1865 /* If either operand is a NaN, just return it. Otherwise, set up
1866 for floating-point trap; we return an overflow. */
1867 if (REAL_VALUE_ISNAN (d1))
1868 return arg1;
1869 else if (REAL_VALUE_ISNAN (d2))
1870 return arg2;
1872 inexact = real_arithmetic (&value, code, &d1, &d2);
1873 real_convert (&result, mode, &value);
1875 /* Don't constant fold this floating point operation if
1876 the result has overflowed and flag_trapping_math. */
1877 if (flag_trapping_math
1878 && MODE_HAS_INFINITIES (mode)
1879 && REAL_VALUE_ISINF (result)
1880 && !REAL_VALUE_ISINF (d1)
1881 && !REAL_VALUE_ISINF (d2))
1882 return NULL_TREE;
1884 /* Don't constant fold this floating point operation if the
1885 result may depend upon the run-time rounding mode and
1886 flag_rounding_math is set, or if GCC's software emulation
1887 is unable to accurately represent the result. */
1888 if ((flag_rounding_math
1889 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1890 && (inexact || !real_identical (&result, &value)))
1891 return NULL_TREE;
1893 t = build_real (type, result);
1895 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1896 return t;
1899 if (TREE_CODE (arg1) == FIXED_CST)
1901 FIXED_VALUE_TYPE f1;
1902 FIXED_VALUE_TYPE f2;
1903 FIXED_VALUE_TYPE result;
1904 tree t, type;
1905 int sat_p;
1906 bool overflow_p;
1908 /* The following codes are handled by fixed_arithmetic. */
1909 switch (code)
1911 case PLUS_EXPR:
1912 case MINUS_EXPR:
1913 case MULT_EXPR:
1914 case TRUNC_DIV_EXPR:
1915 f2 = TREE_FIXED_CST (arg2);
1916 break;
1918 case LSHIFT_EXPR:
1919 case RSHIFT_EXPR:
1920 f2.data.high = TREE_INT_CST_HIGH (arg2);
1921 f2.data.low = TREE_INT_CST_LOW (arg2);
1922 f2.mode = SImode;
1923 break;
1925 default:
1926 return NULL_TREE;
1929 f1 = TREE_FIXED_CST (arg1);
1930 type = TREE_TYPE (arg1);
1931 sat_p = TYPE_SATURATING (type);
1932 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1933 t = build_fixed (type, result);
1934 /* Propagate overflow flags. */
1935 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 return t;
1940 if (TREE_CODE (arg1) == COMPLEX_CST)
1942 tree type = TREE_TYPE (arg1);
1943 tree r1 = TREE_REALPART (arg1);
1944 tree i1 = TREE_IMAGPART (arg1);
1945 tree r2 = TREE_REALPART (arg2);
1946 tree i2 = TREE_IMAGPART (arg2);
1947 tree real, imag;
1949 switch (code)
1951 case PLUS_EXPR:
1952 case MINUS_EXPR:
1953 real = const_binop (code, r1, r2, notrunc);
1954 imag = const_binop (code, i1, i2, notrunc);
1955 break;
1957 case MULT_EXPR:
1958 real = const_binop (MINUS_EXPR,
1959 const_binop (MULT_EXPR, r1, r2, notrunc),
1960 const_binop (MULT_EXPR, i1, i2, notrunc),
1961 notrunc);
1962 imag = const_binop (PLUS_EXPR,
1963 const_binop (MULT_EXPR, r1, i2, notrunc),
1964 const_binop (MULT_EXPR, i1, r2, notrunc),
1965 notrunc);
1966 break;
1968 case RDIV_EXPR:
1970 tree magsquared
1971 = const_binop (PLUS_EXPR,
1972 const_binop (MULT_EXPR, r2, r2, notrunc),
1973 const_binop (MULT_EXPR, i2, i2, notrunc),
1974 notrunc);
1975 tree t1
1976 = const_binop (PLUS_EXPR,
1977 const_binop (MULT_EXPR, r1, r2, notrunc),
1978 const_binop (MULT_EXPR, i1, i2, notrunc),
1979 notrunc);
1980 tree t2
1981 = const_binop (MINUS_EXPR,
1982 const_binop (MULT_EXPR, i1, r2, notrunc),
1983 const_binop (MULT_EXPR, r1, i2, notrunc),
1984 notrunc);
1986 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1987 code = TRUNC_DIV_EXPR;
1989 real = const_binop (code, t1, magsquared, notrunc);
1990 imag = const_binop (code, t2, magsquared, notrunc);
1992 break;
1994 default:
1995 return NULL_TREE;
1998 if (real && imag)
1999 return build_complex (type, real, imag);
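/* [Editor's illustration, not part of the original source.]  The
   complex cases above implement the usual formulas:
     (r1 + i1*I) * (r2 + i2*I) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*I
   and division multiplies through by the conjugate, e.g.
     (3 + 4*I) / (1 + 2*I) = (11 - 2*I) / 5 = 2.2 - 0.4*I
   with t1 = 3*1 + 4*2 = 11, t2 = 4*1 - 3*2 = -2 and magsquared = 5.  */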
2002 if (TREE_CODE (arg1) == VECTOR_CST)
2004 tree type = TREE_TYPE (arg1);
2005 int count = TYPE_VECTOR_SUBPARTS (type), i;
2006 tree elements1, elements2, list = NULL_TREE;
2008 if (TREE_CODE (arg2) != VECTOR_CST)
2009 return NULL_TREE;
2011 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2012 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2014 for (i = 0; i < count; i++)
2016 tree elem1, elem2, elem;
2018 /* The trailing elements can be empty and should be treated as 0.  */
2019 if (!elements1)
2020 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2021 else
2023 elem1 = TREE_VALUE (elements1);
2024 elements1 = TREE_CHAIN (elements1);
2027 if (!elements2)
2028 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2029 else
2031 elem2 = TREE_VALUE (elements2);
2032 elements2 = TREE_CHAIN (elements2);
2035 elem = const_binop (code, elem1, elem2, notrunc);
2037 /* It is possible that const_binop cannot handle the given
2038 code and returns NULL_TREE.  */
2039 if (elem == NULL_TREE)
2040 return NULL_TREE;
2042 list = tree_cons (NULL_TREE, elem, list);
2044 return build_vector (type, nreverse (list));
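/* [Editor's illustration, not part of the original source.]  The
   vector case folds element-wise, e.g. {1,2,3,4} + {10,20,30,40}
   becomes {11,22,33,44}; if a constant's element list is shorter than
   TYPE_VECTOR_SUBPARTS, the missing trailing elements are folded as
   zeros, per the loop above.  */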
2046 return NULL_TREE;
2049 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2050 indicates which particular sizetype to create. */
2052 tree
2053 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2055 return build_int_cst (sizetype_tab[(int) kind], number);
2058 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2059 is a tree code. The type of the result is taken from the operands.
2060 Both must be equivalent integer types, a la int_binop_types_match_p.
2061 If the operands are constant, so is the result. */
2063 tree
2064 size_binop (enum tree_code code, tree arg0, tree arg1)
2066 tree type = TREE_TYPE (arg0);
2068 if (arg0 == error_mark_node || arg1 == error_mark_node)
2069 return error_mark_node;
2071 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2072 TREE_TYPE (arg1)));
2074 /* Handle the special case of two integer constants faster. */
2075 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2077 /* And some specific cases even faster than that. */
2078 if (code == PLUS_EXPR)
2080 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2081 return arg1;
2082 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2083 return arg0;
2085 else if (code == MINUS_EXPR)
2087 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2088 return arg0;
2090 else if (code == MULT_EXPR)
2092 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2093 return arg1;
2096 /* Handle general case of two integer constants. */
2097 return int_const_binop (code, arg0, arg1, 0);
2100 return fold_build2 (code, type, arg0, arg1);
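/* [Editor's illustration, not part of the original source.]  For two
   INTEGER_CST operands, the fast paths above mean
   size_binop (PLUS_EXPR, size_int (0), size_int (8)) hands back the
   second operand unchanged, while size_binop (PLUS_EXPR, size_int (4),
   size_int (8)) goes through int_const_binop and yields size_int (12).  */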
2103 /* Given two values, either both of sizetype or both of bitsizetype,
2104 compute the difference between the two values. Return the value
2105 in signed type corresponding to the type of the operands. */
2107 tree
2108 size_diffop (tree arg0, tree arg1)
2110 tree type = TREE_TYPE (arg0);
2111 tree ctype;
2113 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2114 TREE_TYPE (arg1)));
2116 /* If the type is already signed, just do the simple thing. */
2117 if (!TYPE_UNSIGNED (type))
2118 return size_binop (MINUS_EXPR, arg0, arg1);
2120 if (type == sizetype)
2121 ctype = ssizetype;
2122 else if (type == bitsizetype)
2123 ctype = sbitsizetype;
2124 else
2125 ctype = signed_type_for (type);
2127 /* If either operand is not a constant, do the conversions to the signed
2128 type and subtract. The hardware will do the right thing with any
2129 overflow in the subtraction. */
2130 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2131 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2132 fold_convert (ctype, arg1));
2134 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2135 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2136 overflow) and negate (which can't either). Special-case a result
2137 of zero while we're here. */
2138 if (tree_int_cst_equal (arg0, arg1))
2139 return build_int_cst (ctype, 0);
2140 else if (tree_int_cst_lt (arg1, arg0))
2141 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2142 else
2143 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2144 fold_convert (ctype, size_binop (MINUS_EXPR,
2145 arg1, arg0)));
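/* [Editor's illustration, not part of the original source.]  For
   sizetype constants, size_diffop (size_int (2), size_int (5)) does
   not compute 2 - 5 in the unsigned type (a huge value); it computes
   5 - 2 = 3, converts that to ssizetype, and negates, returning -3.  */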
2148 /* A subroutine of fold_convert_const handling conversions of an
2149 INTEGER_CST to another integer type. */
2151 static tree
2152 fold_convert_const_int_from_int (tree type, const_tree arg1)
2154 tree t;
2156 /* Given an integer constant, make new constant with new type,
2157 appropriately sign-extended or truncated. */
2158 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2159 TREE_INT_CST_HIGH (arg1),
2160 /* Don't set the overflow when
2161 converting from a pointer, */
2162 !POINTER_TYPE_P (TREE_TYPE (arg1))
2163 /* or to a sizetype with the same signedness
2164 and unchanged precision.
2165 ??? sizetype is always sign-extended,
2166 but its signedness depends on the
2167 frontend. Thus we see spurious overflows
2168 here if we do not check this. */
2169 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2170 == TYPE_PRECISION (type))
2171 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2172 == TYPE_UNSIGNED (type))
2173 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2174 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2175 || (TREE_CODE (type) == INTEGER_TYPE
2176 && TYPE_IS_SIZETYPE (type)))),
2177 (TREE_INT_CST_HIGH (arg1) < 0
2178 && (TYPE_UNSIGNED (type)
2179 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2180 | TREE_OVERFLOW (arg1));
2182 return t;
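/* [Editor's illustration, not part of the original source.]  For
   example, converting the constant 300 to an 8-bit integer type keeps
   only the low eight bits (300 == 0x12C, so the result is 0x2C == 44);
   whether the change of value is recorded as an overflow is governed
   by the overflowable and overflowed arguments computed above.  */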
2185 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2186 to an integer type. */
2188 static tree
2189 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2191 int overflow = 0;
2192 tree t;
2194 /* The following code implements the floating point to integer
2195 conversion rules required by the Java Language Specification,
2196 that IEEE NaNs are mapped to zero and values that overflow
2197 the target precision saturate, i.e. values greater than
2198 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2199 are mapped to INT_MIN. These semantics are allowed by the
2200 C and C++ standards that simply state that the behavior of
2201 FP-to-integer conversion is unspecified upon overflow. */
2203 HOST_WIDE_INT high, low;
2204 REAL_VALUE_TYPE r;
2205 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2207 switch (code)
2209 case FIX_TRUNC_EXPR:
2210 real_trunc (&r, VOIDmode, &x);
2211 break;
2213 default:
2214 gcc_unreachable ();
2217 /* If R is NaN, return zero and show we have an overflow. */
2218 if (REAL_VALUE_ISNAN (r))
2220 overflow = 1;
2221 high = 0;
2222 low = 0;
2225 /* See if R is less than the lower bound or greater than the
2226 upper bound. */
2228 if (! overflow)
2230 tree lt = TYPE_MIN_VALUE (type);
2231 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2232 if (REAL_VALUES_LESS (r, l))
2234 overflow = 1;
2235 high = TREE_INT_CST_HIGH (lt);
2236 low = TREE_INT_CST_LOW (lt);
2240 if (! overflow)
2242 tree ut = TYPE_MAX_VALUE (type);
2243 if (ut)
2245 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2246 if (REAL_VALUES_LESS (u, r))
2248 overflow = 1;
2249 high = TREE_INT_CST_HIGH (ut);
2250 low = TREE_INT_CST_LOW (ut);
2255 if (! overflow)
2256 REAL_VALUE_TO_INT (&low, &high, r);
2258 t = force_fit_type_double (type, low, high, -1,
2259 overflow | TREE_OVERFLOW (arg1));
2260 return t;
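/* [Editor's illustration, not part of the original source.]  Assuming
   a 32-bit signed target type, these rules fold (int) 1.0e30 to
   INT_MAX, (int) -1.0e30 to INT_MIN, and (int) NaN to 0, each with
   TREE_OVERFLOW set on the resulting constant.  */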
2263 /* A subroutine of fold_convert_const handling conversions of a
2264 FIXED_CST to an integer type. */
2266 static tree
2267 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2269 tree t;
2270 double_int temp, temp_trunc;
2271 unsigned int mode;
2273 /* Right shift FIXED_CST to temp by fbit. */
2274 temp = TREE_FIXED_CST (arg1).data;
2275 mode = TREE_FIXED_CST (arg1).mode;
2276 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2278 lshift_double (temp.low, temp.high,
2279 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2280 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2282 /* Left shift temp to temp_trunc by fbit. */
2283 lshift_double (temp.low, temp.high,
2284 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2285 &temp_trunc.low, &temp_trunc.high,
2286 SIGNED_FIXED_POINT_MODE_P (mode));
2288 else
2290 temp.low = 0;
2291 temp.high = 0;
2292 temp_trunc.low = 0;
2293 temp_trunc.high = 0;
2296 /* If FIXED_CST is negative, we need to round the value toward 0.
2297 We check whether the fractional bits are nonzero, and if so add 1 to temp.  */
2298 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2299 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2301 double_int one;
2302 one.low = 1;
2303 one.high = 0;
2304 temp = double_int_add (temp, one);
2307 /* Given a fixed-point constant, make new constant with new type,
2308 appropriately sign-extended or truncated. */
2309 t = force_fit_type_double (type, temp.low, temp.high, -1,
2310 (temp.high < 0
2311 && (TYPE_UNSIGNED (type)
2312 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2313 | TREE_OVERFLOW (arg1));
2315 return t;
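/* [Editor's illustration, not part of the original source.]  For the
   signed fixed-point value -2.5, the arithmetic right shift by fbit
   yields -3 (it rounds toward minus infinity); the nonzero fractional
   bits then trigger the +1 adjustment above, giving -2, i.e. the
   round-toward-zero result C requires for conversion to integer.  */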
2318 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2319 to another floating point type. */
2321 static tree
2322 fold_convert_const_real_from_real (tree type, const_tree arg1)
2324 REAL_VALUE_TYPE value;
2325 tree t;
2327 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2328 t = build_real (type, value);
2330 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2331 return t;
2334 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2335 to a floating point type. */
2337 static tree
2338 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2340 REAL_VALUE_TYPE value;
2341 tree t;
2343 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2344 t = build_real (type, value);
2346 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2347 return t;
2350 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2351 to another fixed-point type. */
2353 static tree
2354 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2356 FIXED_VALUE_TYPE value;
2357 tree t;
2358 bool overflow_p;
2360 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2361 TYPE_SATURATING (type));
2362 t = build_fixed (type, value);
2364 /* Propagate overflow flags. */
2365 if (overflow_p | TREE_OVERFLOW (arg1))
2366 TREE_OVERFLOW (t) = 1;
2367 return t;
2370 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2371 to a fixed-point type. */
2373 static tree
2374 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2376 FIXED_VALUE_TYPE value;
2377 tree t;
2378 bool overflow_p;
2380 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2381 TREE_INT_CST (arg1),
2382 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2383 TYPE_SATURATING (type));
2384 t = build_fixed (type, value);
2386 /* Propagate overflow flags. */
2387 if (overflow_p | TREE_OVERFLOW (arg1))
2388 TREE_OVERFLOW (t) = 1;
2389 return t;
2392 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2393 to a fixed-point type. */
2395 static tree
2396 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2398 FIXED_VALUE_TYPE value;
2399 tree t;
2400 bool overflow_p;
2402 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2403 &TREE_REAL_CST (arg1),
2404 TYPE_SATURATING (type));
2405 t = build_fixed (type, value);
2407 /* Propagate overflow flags. */
2408 if (overflow_p | TREE_OVERFLOW (arg1))
2409 TREE_OVERFLOW (t) = 1;
2410 return t;
2413 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2414 type TYPE. If no simplification can be done return NULL_TREE. */
2416 static tree
2417 fold_convert_const (enum tree_code code, tree type, tree arg1)
2419 if (TREE_TYPE (arg1) == type)
2420 return arg1;
2422 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2423 || TREE_CODE (type) == OFFSET_TYPE)
2425 if (TREE_CODE (arg1) == INTEGER_CST)
2426 return fold_convert_const_int_from_int (type, arg1);
2427 else if (TREE_CODE (arg1) == REAL_CST)
2428 return fold_convert_const_int_from_real (code, type, arg1);
2429 else if (TREE_CODE (arg1) == FIXED_CST)
2430 return fold_convert_const_int_from_fixed (type, arg1);
2432 else if (TREE_CODE (type) == REAL_TYPE)
2434 if (TREE_CODE (arg1) == INTEGER_CST)
2435 return build_real_from_int_cst (type, arg1);
2436 else if (TREE_CODE (arg1) == REAL_CST)
2437 return fold_convert_const_real_from_real (type, arg1);
2438 else if (TREE_CODE (arg1) == FIXED_CST)
2439 return fold_convert_const_real_from_fixed (type, arg1);
2441 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2443 if (TREE_CODE (arg1) == FIXED_CST)
2444 return fold_convert_const_fixed_from_fixed (type, arg1);
2445 else if (TREE_CODE (arg1) == INTEGER_CST)
2446 return fold_convert_const_fixed_from_int (type, arg1);
2447 else if (TREE_CODE (arg1) == REAL_CST)
2448 return fold_convert_const_fixed_from_real (type, arg1);
2450 return NULL_TREE;
2453 /* Construct a vector of zero elements of vector type TYPE. */
2455 static tree
2456 build_zero_vector (tree type)
2458 tree elem, list;
2459 int i, units;
2461 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2462 units = TYPE_VECTOR_SUBPARTS (type);
2464 list = NULL_TREE;
2465 for (i = 0; i < units; i++)
2466 list = tree_cons (NULL_TREE, elem, list);
2467 return build_vector (type, list);
2470 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2472 bool
2473 fold_convertible_p (const_tree type, const_tree arg)
2475 tree orig = TREE_TYPE (arg);
2477 if (type == orig)
2478 return true;
2480 if (TREE_CODE (arg) == ERROR_MARK
2481 || TREE_CODE (type) == ERROR_MARK
2482 || TREE_CODE (orig) == ERROR_MARK)
2483 return false;
2485 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2486 return true;
2488 switch (TREE_CODE (type))
2490 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2491 case POINTER_TYPE: case REFERENCE_TYPE:
2492 case OFFSET_TYPE:
2493 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2494 || TREE_CODE (orig) == OFFSET_TYPE)
2495 return true;
2496 return (TREE_CODE (orig) == VECTOR_TYPE
2497 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2499 case REAL_TYPE:
2500 case FIXED_POINT_TYPE:
2501 case COMPLEX_TYPE:
2502 case VECTOR_TYPE:
2503 case VOID_TYPE:
2504 return TREE_CODE (type) == TREE_CODE (orig);
2506 default:
2507 return false;
2511 /* Convert expression ARG to type TYPE. Used by the middle-end for
2512 simple conversions in preference to calling the front-end's convert. */
2514 tree
2515 fold_convert (tree type, tree arg)
2517 tree orig = TREE_TYPE (arg);
2518 tree tem;
2520 if (type == orig)
2521 return arg;
2523 if (TREE_CODE (arg) == ERROR_MARK
2524 || TREE_CODE (type) == ERROR_MARK
2525 || TREE_CODE (orig) == ERROR_MARK)
2526 return error_mark_node;
2528 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2529 return fold_build1 (NOP_EXPR, type, arg);
2531 switch (TREE_CODE (type))
2533 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2534 case POINTER_TYPE: case REFERENCE_TYPE:
2535 case OFFSET_TYPE:
2536 if (TREE_CODE (arg) == INTEGER_CST)
2538 tem = fold_convert_const (NOP_EXPR, type, arg);
2539 if (tem != NULL_TREE)
2540 return tem;
2542 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2543 || TREE_CODE (orig) == OFFSET_TYPE)
2544 return fold_build1 (NOP_EXPR, type, arg);
2545 if (TREE_CODE (orig) == COMPLEX_TYPE)
2547 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2548 return fold_convert (type, tem);
2550 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2551 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2552 return fold_build1 (NOP_EXPR, type, arg);
2554 case REAL_TYPE:
2555 if (TREE_CODE (arg) == INTEGER_CST)
2557 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2558 if (tem != NULL_TREE)
2559 return tem;
2561 else if (TREE_CODE (arg) == REAL_CST)
2563 tem = fold_convert_const (NOP_EXPR, type, arg);
2564 if (tem != NULL_TREE)
2565 return tem;
2567 else if (TREE_CODE (arg) == FIXED_CST)
2569 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2570 if (tem != NULL_TREE)
2571 return tem;
2574 switch (TREE_CODE (orig))
2576 case INTEGER_TYPE:
2577 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2578 case POINTER_TYPE: case REFERENCE_TYPE:
2579 return fold_build1 (FLOAT_EXPR, type, arg);
2581 case REAL_TYPE:
2582 return fold_build1 (NOP_EXPR, type, arg);
2584 case FIXED_POINT_TYPE:
2585 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2587 case COMPLEX_TYPE:
2588 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2589 return fold_convert (type, tem);
2591 default:
2592 gcc_unreachable ();
2595 case FIXED_POINT_TYPE:
2596 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2597 || TREE_CODE (arg) == REAL_CST)
2599 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2600 if (tem != NULL_TREE)
2601 return tem;
2604 switch (TREE_CODE (orig))
2606 case FIXED_POINT_TYPE:
2607 case INTEGER_TYPE:
2608 case ENUMERAL_TYPE:
2609 case BOOLEAN_TYPE:
2610 case REAL_TYPE:
2611 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2613 case COMPLEX_TYPE:
2614 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2615 return fold_convert (type, tem);
2617 default:
2618 gcc_unreachable ();
2621 case COMPLEX_TYPE:
2622 switch (TREE_CODE (orig))
2624 case INTEGER_TYPE:
2625 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2626 case POINTER_TYPE: case REFERENCE_TYPE:
2627 case REAL_TYPE:
2628 case FIXED_POINT_TYPE:
2629 return build2 (COMPLEX_EXPR, type,
2630 fold_convert (TREE_TYPE (type), arg),
2631 fold_convert (TREE_TYPE (type), integer_zero_node));
2632 case COMPLEX_TYPE:
2634 tree rpart, ipart;
2636 if (TREE_CODE (arg) == COMPLEX_EXPR)
2638 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2639 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2640 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2643 arg = save_expr (arg);
2644 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2645 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2646 rpart = fold_convert (TREE_TYPE (type), rpart);
2647 ipart = fold_convert (TREE_TYPE (type), ipart);
2648 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2651 default:
2652 gcc_unreachable ();
2655 case VECTOR_TYPE:
2656 if (integer_zerop (arg))
2657 return build_zero_vector (type);
2658 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2659 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2660 || TREE_CODE (orig) == VECTOR_TYPE);
2661 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2663 case VOID_TYPE:
2664 tem = fold_ignored_result (arg);
2665 if (TREE_CODE (tem) == MODIFY_EXPR)
2666 return tem;
2667 return fold_build1 (NOP_EXPR, type, tem);
2669 default:
2670 gcc_unreachable ();
2674 /* Return false if expr can be assumed not to be an lvalue, true
2675 otherwise. */
2677 static bool
2678 maybe_lvalue_p (const_tree x)
2680 /* We only need to wrap lvalue tree codes. */
2681 switch (TREE_CODE (x))
2683 case VAR_DECL:
2684 case PARM_DECL:
2685 case RESULT_DECL:
2686 case LABEL_DECL:
2687 case FUNCTION_DECL:
2688 case SSA_NAME:
2690 case COMPONENT_REF:
2691 case INDIRECT_REF:
2692 case ALIGN_INDIRECT_REF:
2693 case MISALIGNED_INDIRECT_REF:
2694 case ARRAY_REF:
2695 case ARRAY_RANGE_REF:
2696 case BIT_FIELD_REF:
2697 case OBJ_TYPE_REF:
2699 case REALPART_EXPR:
2700 case IMAGPART_EXPR:
2701 case PREINCREMENT_EXPR:
2702 case PREDECREMENT_EXPR:
2703 case SAVE_EXPR:
2704 case TRY_CATCH_EXPR:
2705 case WITH_CLEANUP_EXPR:
2706 case COMPOUND_EXPR:
2707 case MODIFY_EXPR:
2708 case TARGET_EXPR:
2709 case COND_EXPR:
2710 case BIND_EXPR:
2711 case MIN_EXPR:
2712 case MAX_EXPR:
2713 break;
2715 default:
2716 /* Assume the worst for front-end tree codes. */
2717 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2718 break;
2719 return false;
2722 return true;
2725 /* Return an expr equal to X but certainly not valid as an lvalue. */
2727 tree
2728 non_lvalue (tree x)
2730 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2731 us. */
2732 if (in_gimple_form)
2733 return x;
2735 if (! maybe_lvalue_p (x))
2736 return x;
2737 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2740 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2741 Zero means allow extended lvalues. */
2743 int pedantic_lvalues;
2745 /* When pedantic, return an expr equal to X but certainly not valid as a
2746 pedantic lvalue. Otherwise, return X. */
2748 static tree
2749 pedantic_non_lvalue (tree x)
2751 if (pedantic_lvalues)
2752 return non_lvalue (x);
2753 else
2754 return x;
2757 /* Given a tree comparison code, return the code that is the logical inverse
2758 of the given code. It is not safe to do this for floating-point
2759 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS
2760 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2762 enum tree_code
2763 invert_tree_comparison (enum tree_code code, bool honor_nans)
2765 if (honor_nans && flag_trapping_math)
2766 return ERROR_MARK;
2768 switch (code)
2770 case EQ_EXPR:
2771 return NE_EXPR;
2772 case NE_EXPR:
2773 return EQ_EXPR;
2774 case GT_EXPR:
2775 return honor_nans ? UNLE_EXPR : LE_EXPR;
2776 case GE_EXPR:
2777 return honor_nans ? UNLT_EXPR : LT_EXPR;
2778 case LT_EXPR:
2779 return honor_nans ? UNGE_EXPR : GE_EXPR;
2780 case LE_EXPR:
2781 return honor_nans ? UNGT_EXPR : GT_EXPR;
2782 case LTGT_EXPR:
2783 return UNEQ_EXPR;
2784 case UNEQ_EXPR:
2785 return LTGT_EXPR;
2786 case UNGT_EXPR:
2787 return LE_EXPR;
2788 case UNGE_EXPR:
2789 return LT_EXPR;
2790 case UNLT_EXPR:
2791 return GE_EXPR;
2792 case UNLE_EXPR:
2793 return GT_EXPR;
2794 case ORDERED_EXPR:
2795 return UNORDERED_EXPR;
2796 case UNORDERED_EXPR:
2797 return ORDERED_EXPR;
2798 default:
2799 gcc_unreachable ();
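/* [Editor's illustration, not part of the original source.]  With
   NaNs honored and -fno-trapping-math, the inverse of x < y is
   x UNGE y -- true exactly when x < y is false, including when the
   operands are unordered; with trapping math the function returns
   ERROR_MARK instead, since the unordered form would not raise the
   invalid exception on NaNs the way the ordered comparison does.  */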
2803 /* Similar, but return the comparison that results if the operands are
2804 swapped. This is safe for floating-point. */
2806 enum tree_code
2807 swap_tree_comparison (enum tree_code code)
2809 switch (code)
2811 case EQ_EXPR:
2812 case NE_EXPR:
2813 case ORDERED_EXPR:
2814 case UNORDERED_EXPR:
2815 case LTGT_EXPR:
2816 case UNEQ_EXPR:
2817 return code;
2818 case GT_EXPR:
2819 return LT_EXPR;
2820 case GE_EXPR:
2821 return LE_EXPR;
2822 case LT_EXPR:
2823 return GT_EXPR;
2824 case LE_EXPR:
2825 return GE_EXPR;
2826 case UNGT_EXPR:
2827 return UNLT_EXPR;
2828 case UNGE_EXPR:
2829 return UNLE_EXPR;
2830 case UNLT_EXPR:
2831 return UNGT_EXPR;
2832 case UNLE_EXPR:
2833 return UNGE_EXPR;
2834 default:
2835 gcc_unreachable ();
2840 /* Convert a comparison tree code from an enum tree_code representation
2841 into a compcode bit-based encoding. This function is the inverse of
2842 compcode_to_comparison. */
2844 static enum comparison_code
2845 comparison_to_compcode (enum tree_code code)
2847 switch (code)
2849 case LT_EXPR:
2850 return COMPCODE_LT;
2851 case EQ_EXPR:
2852 return COMPCODE_EQ;
2853 case LE_EXPR:
2854 return COMPCODE_LE;
2855 case GT_EXPR:
2856 return COMPCODE_GT;
2857 case NE_EXPR:
2858 return COMPCODE_NE;
2859 case GE_EXPR:
2860 return COMPCODE_GE;
2861 case ORDERED_EXPR:
2862 return COMPCODE_ORD;
2863 case UNORDERED_EXPR:
2864 return COMPCODE_UNORD;
2865 case UNLT_EXPR:
2866 return COMPCODE_UNLT;
2867 case UNEQ_EXPR:
2868 return COMPCODE_UNEQ;
2869 case UNLE_EXPR:
2870 return COMPCODE_UNLE;
2871 case UNGT_EXPR:
2872 return COMPCODE_UNGT;
2873 case LTGT_EXPR:
2874 return COMPCODE_LTGT;
2875 case UNGE_EXPR:
2876 return COMPCODE_UNGE;
2877 default:
2878 gcc_unreachable ();
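/* [Editor's illustration, not part of the original source.]  In the
   encoding above the four bits stand for LT, EQ, GT and UNORD, so the
   composite codes are bitwise ORs of the primitive ones:

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)       (1 | 2 == 3)
     COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GE)    (8 | 6 == 14)

   This is what lets combine_comparisons below implement AND and OR of
   comparisons as bitwise AND and OR of their compcodes.  */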
2882 /* Convert a compcode bit-based encoding of a comparison operator back
2883 to GCC's enum tree_code representation. This function is the
2884 inverse of comparison_to_compcode. */
2886 static enum tree_code
2887 compcode_to_comparison (enum comparison_code code)
2889 switch (code)
2891 case COMPCODE_LT:
2892 return LT_EXPR;
2893 case COMPCODE_EQ:
2894 return EQ_EXPR;
2895 case COMPCODE_LE:
2896 return LE_EXPR;
2897 case COMPCODE_GT:
2898 return GT_EXPR;
2899 case COMPCODE_NE:
2900 return NE_EXPR;
2901 case COMPCODE_GE:
2902 return GE_EXPR;
2903 case COMPCODE_ORD:
2904 return ORDERED_EXPR;
2905 case COMPCODE_UNORD:
2906 return UNORDERED_EXPR;
2907 case COMPCODE_UNLT:
2908 return UNLT_EXPR;
2909 case COMPCODE_UNEQ:
2910 return UNEQ_EXPR;
2911 case COMPCODE_UNLE:
2912 return UNLE_EXPR;
2913 case COMPCODE_UNGT:
2914 return UNGT_EXPR;
2915 case COMPCODE_LTGT:
2916 return LTGT_EXPR;
2917 case COMPCODE_UNGE:
2918 return UNGE_EXPR;
2919 default:
2920 gcc_unreachable ();
2924 /* Return a tree for the comparison which is the combination of
2925 doing the AND or OR (depending on CODE) of the two operations LCODE
2926 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2927 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2928 if this makes the transformation invalid. */
2930 tree
2931 combine_comparisons (enum tree_code code, enum tree_code lcode,
2932 enum tree_code rcode, tree truth_type,
2933 tree ll_arg, tree lr_arg)
2935 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2936 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2937 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2938 enum comparison_code compcode;
2940 switch (code)
2942 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2943 compcode = lcompcode & rcompcode;
2944 break;
2946 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2947 compcode = lcompcode | rcompcode;
2948 break;
2950 default:
2951 return NULL_TREE;
2954 if (!honor_nans)
2956 /* Eliminate unordered comparisons, as well as LTGT and ORD
2957 which are not used unless the mode has NaNs. */
2958 compcode &= ~COMPCODE_UNORD;
2959 if (compcode == COMPCODE_LTGT)
2960 compcode = COMPCODE_NE;
2961 else if (compcode == COMPCODE_ORD)
2962 compcode = COMPCODE_TRUE;
2964 else if (flag_trapping_math)
2966 /* Check that the original operation and the optimized ones will trap
2967 under the same condition. */
2968 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2969 && (lcompcode != COMPCODE_EQ)
2970 && (lcompcode != COMPCODE_ORD);
2971 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2972 && (rcompcode != COMPCODE_EQ)
2973 && (rcompcode != COMPCODE_ORD);
2974 bool trap = (compcode & COMPCODE_UNORD) == 0
2975 && (compcode != COMPCODE_EQ)
2976 && (compcode != COMPCODE_ORD);
2978 /* In a short-circuited boolean expression the LHS might be
2979 such that the RHS, if evaluated, will never trap. For
2980 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2981 if neither x nor y is NaN. (This is a mixed blessing: for
2982 example, the expression above will never trap, hence
2983 optimizing it to x < y would be invalid). */
2984 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2985 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2986 rtrap = false;
2988 /* If the comparison was short-circuited, and only the RHS
2989 trapped, we may now generate a spurious trap. */
2990 if (rtrap && !ltrap
2991 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2992 return NULL_TREE;
2994 /* If we changed the conditions that cause a trap, we lose. */
2995 if ((ltrap || rtrap) != trap)
2996 return NULL_TREE;
2999 if (compcode == COMPCODE_TRUE)
3000 return constant_boolean_node (true, truth_type);
3001 else if (compcode == COMPCODE_FALSE)
3002 return constant_boolean_node (false, truth_type);
3003 else
3004 return fold_build2 (compcode_to_comparison (compcode),
3005 truth_type, ll_arg, lr_arg);
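/* [Editor's illustration, not part of the original source.]  For
   integer operands, combine_comparisons (TRUTH_OR_EXPR, LT_EXPR,
   EQ_EXPR, type, x, y) computes COMPCODE_LT | COMPCODE_EQ
   == COMPCODE_LE and folds x < y || x == y into x <= y; the AND of
   the same pair gives COMPCODE_FALSE, i.e. constant false.  */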
3008 /* Return nonzero if two operands (typically of the same tree node)
3009 are necessarily equal. If either argument has side-effects this
3010 function returns zero. FLAGS modifies behavior as follows:
3012 If OEP_ONLY_CONST is set, only return nonzero for constants.
3013 This function tests whether the operands are indistinguishable;
3014 it does not test whether they are equal using C's == operation.
3015 The distinction is important for IEEE floating point, because
3016 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3017 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3019 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3020 even though it may hold multiple values during a function.
3021 This is because a GCC tree node guarantees that nothing else is
3022 executed between the evaluation of its "operands" (which may often
3023 be evaluated in arbitrary order). Hence if the operands themselves
3024 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3025 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3026 unset means assuming isochronic (or instantaneous) tree equivalence.
3027 Unless comparing arbitrary expression trees, such as from different
3028 statements, this flag can usually be left unset.
3030 If OEP_PURE_SAME is set, then pure functions with identical arguments
3031 are considered the same. It is used when the caller has other ways
3032 to ensure that global memory is unchanged in between. */
3034 int
3035 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3037 /* If either is ERROR_MARK, they aren't equal. */
3038 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3039 return 0;
3041 /* Check equality of integer constants before bailing out due to
3042 precision differences. */
3043 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3044 return tree_int_cst_equal (arg0, arg1);
3046 /* If both types don't have the same signedness, then we can't consider
3047 them equal. We must check this before the STRIP_NOPS calls
3048 because they may change the signedness of the arguments. As pointers
3049 strictly don't have a signedness, require either two pointers or
3050 two non-pointers as well. */
3051 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3052 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3053 return 0;
3055 /* If both types don't have the same precision, then it is not safe
3056 to strip NOPs. */
3057 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3058 return 0;
3060 STRIP_NOPS (arg0);
3061 STRIP_NOPS (arg1);
3063 /* In case both args are comparisons but with different comparison
3064 code, try to swap the comparison operands of one arg to produce
3065 a match and compare that variant. */
3066 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3067 && COMPARISON_CLASS_P (arg0)
3068 && COMPARISON_CLASS_P (arg1))
3070 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3072 if (TREE_CODE (arg0) == swap_code)
3073 return operand_equal_p (TREE_OPERAND (arg0, 0),
3074 TREE_OPERAND (arg1, 1), flags)
3075 && operand_equal_p (TREE_OPERAND (arg0, 1),
3076 TREE_OPERAND (arg1, 0), flags);
3079 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3080 /* This is needed for conversions and for COMPONENT_REF.
3081 Might as well play it safe and always test this. */
3082 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3083 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3084 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3085 return 0;
3087 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3088 We don't care about side effects in that case because the SAVE_EXPR
3089 takes care of that for us. In all other cases, two expressions are
3090 equal if they have no side effects. If we have two identical
3091 expressions with side effects that should be treated the same due
3092 to the only side effects being identical SAVE_EXPR's, that will
3093 be detected in the recursive calls below. */
3094 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3095 && (TREE_CODE (arg0) == SAVE_EXPR
3096 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3097 return 1;
3099 /* Next handle constant cases, those for which we can return 1 even
3100 if ONLY_CONST is set. */
3101 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3102 switch (TREE_CODE (arg0))
3104 case INTEGER_CST:
3105 return tree_int_cst_equal (arg0, arg1);
3107 case FIXED_CST:
3108 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3109 TREE_FIXED_CST (arg1));
3111 case REAL_CST:
3112 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3113 TREE_REAL_CST (arg1)))
3114 return 1;
3117 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3119 /* If we do not distinguish between signed and unsigned zero,
3120 consider them equal. */
3121 if (real_zerop (arg0) && real_zerop (arg1))
3122 return 1;
3124 return 0;
3126 case VECTOR_CST:
3128 tree v1, v2;
3130 v1 = TREE_VECTOR_CST_ELTS (arg0);
3131 v2 = TREE_VECTOR_CST_ELTS (arg1);
3132 while (v1 && v2)
3134 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3135 flags))
3136 return 0;
3137 v1 = TREE_CHAIN (v1);
3138 v2 = TREE_CHAIN (v2);
3141 return v1 == v2;
3144 case COMPLEX_CST:
3145 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3146 flags)
3147 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3148 flags));
3150 case STRING_CST:
3151 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3152 && ! memcmp (TREE_STRING_POINTER (arg0),
3153 TREE_STRING_POINTER (arg1),
3154 TREE_STRING_LENGTH (arg0)));
3156 case ADDR_EXPR:
3157 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3159 default:
3160 break;
3163 if (flags & OEP_ONLY_CONST)
3164 return 0;
3166 /* Define macros to test an operand from arg0 and arg1 for equality and a
3167 variant that allows null and views null as being different from any
3168 non-null value. In the latter case, if either is null, they both
3169 must be; otherwise, do the normal comparison. */
3170 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3171 TREE_OPERAND (arg1, N), flags)
3173 #define OP_SAME_WITH_NULL(N) \
3174 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3175 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3177 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3179 case tcc_unary:
3180 /* Two conversions are equal only if signedness and modes match. */
3181 switch (TREE_CODE (arg0))
3183 CASE_CONVERT:
3184 case FIX_TRUNC_EXPR:
3185 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3186 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3187 return 0;
3188 break;
3189 default:
3190 break;
3193 return OP_SAME (0);
3196 case tcc_comparison:
3197 case tcc_binary:
3198 if (OP_SAME (0) && OP_SAME (1))
3199 return 1;
3201 /* For commutative ops, allow the other order. */
3202 return (commutative_tree_code (TREE_CODE (arg0))
3203 && operand_equal_p (TREE_OPERAND (arg0, 0),
3204 TREE_OPERAND (arg1, 1), flags)
3205 && operand_equal_p (TREE_OPERAND (arg0, 1),
3206 TREE_OPERAND (arg1, 0), flags));
3208 case tcc_reference:
3209 /* If either of the pointer (or reference) expressions we are
3210 dereferencing contain a side effect, these cannot be equal. */
3211 if (TREE_SIDE_EFFECTS (arg0)
3212 || TREE_SIDE_EFFECTS (arg1))
3213 return 0;
3215 switch (TREE_CODE (arg0))
3217 case INDIRECT_REF:
3218 case ALIGN_INDIRECT_REF:
3219 case MISALIGNED_INDIRECT_REF:
3220 case REALPART_EXPR:
3221 case IMAGPART_EXPR:
3222 return OP_SAME (0);
3224 case ARRAY_REF:
3225 case ARRAY_RANGE_REF:
3226 /* Operands 2 and 3 may be null.
3227 Compare the array index by value first if it is constant, as we
3228 may have different types but the same value here. */
3229 return (OP_SAME (0)
3230 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3231 TREE_OPERAND (arg1, 1))
3232 || OP_SAME (1))
3233 && OP_SAME_WITH_NULL (2)
3234 && OP_SAME_WITH_NULL (3));
3236 case COMPONENT_REF:
3237 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3238 may be NULL when we're called to compare MEM_EXPRs. */
3239 return OP_SAME_WITH_NULL (0)
3240 && OP_SAME (1)
3241 && OP_SAME_WITH_NULL (2);
3243 case BIT_FIELD_REF:
3244 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3246 default:
3247 return 0;
3250 case tcc_expression:
3251 switch (TREE_CODE (arg0))
3253 case ADDR_EXPR:
3254 case TRUTH_NOT_EXPR:
3255 return OP_SAME (0);
3257 case TRUTH_ANDIF_EXPR:
3258 case TRUTH_ORIF_EXPR:
3259 return OP_SAME (0) && OP_SAME (1);
3261 case TRUTH_AND_EXPR:
3262 case TRUTH_OR_EXPR:
3263 case TRUTH_XOR_EXPR:
3264 if (OP_SAME (0) && OP_SAME (1))
3265 return 1;
3267 /* Otherwise take into account this is a commutative operation. */
3268 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3269 TREE_OPERAND (arg1, 1), flags)
3270 && operand_equal_p (TREE_OPERAND (arg0, 1),
3271 TREE_OPERAND (arg1, 0), flags));
3273 case COND_EXPR:
3274 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3276 default:
3277 return 0;
3280 case tcc_vl_exp:
3281 switch (TREE_CODE (arg0))
3283 case CALL_EXPR:
3284 /* If the CALL_EXPRs call different functions, then they
3285 clearly cannot be equal. */
3286 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3287 flags))
3288 return 0;
3291 unsigned int cef = call_expr_flags (arg0);
3292 if (flags & OEP_PURE_SAME)
3293 cef &= ECF_CONST | ECF_PURE;
3294 else
3295 cef &= ECF_CONST;
3296 if (!cef)
3297 return 0;
3300 /* Now see if all the arguments are the same. */
3302 const_call_expr_arg_iterator iter0, iter1;
3303 const_tree a0, a1;
3304 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3305 a1 = first_const_call_expr_arg (arg1, &iter1);
3306 a0 && a1;
3307 a0 = next_const_call_expr_arg (&iter0),
3308 a1 = next_const_call_expr_arg (&iter1))
3309 if (! operand_equal_p (a0, a1, flags))
3310 return 0;
3312 /* If we get here and both argument lists are exhausted
3313 then the CALL_EXPRs are equal. */
3314 return ! (a0 || a1);
3316 default:
3317 return 0;
3320 case tcc_declaration:
3321 /* Consider __builtin_sqrt equal to sqrt. */
3322 return (TREE_CODE (arg0) == FUNCTION_DECL
3323 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3324 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3325 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3327 default:
3328 return 0;
3331 #undef OP_SAME
3332 #undef OP_SAME_WITH_NULL
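/* [Editor's illustration, not part of the original source.]  Two
   consequences of the rules above: -0.0 and 0.0 do not compare equal
   when the mode honors signed zeros, because REAL_VALUES_IDENTICAL
   distinguishes them even though -0.0 == 0.0 in C; and a + b compares
   equal to b + a through the commutative-operand fallback in the
   tcc_binary case.  */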
3335 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3336 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3338 When in doubt, return 0. */
3340 static int
3341 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3343 int unsignedp1, unsignedpo;
3344 tree primarg0, primarg1, primother;
3345 unsigned int correct_width;
3347 if (operand_equal_p (arg0, arg1, 0))
3348 return 1;
3350 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3351 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3352 return 0;
3354 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3355 and see if the inner values are the same. This removes any
3356 signedness comparison, which doesn't matter here. */
3357 primarg0 = arg0, primarg1 = arg1;
3358 STRIP_NOPS (primarg0);
3359 STRIP_NOPS (primarg1);
3360 if (operand_equal_p (primarg0, primarg1, 0))
3361 return 1;
3363 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3364 actual comparison operand, ARG0.
3366 First throw away any conversions to wider types
3367 already present in the operands. */
3369 primarg1 = get_narrower (arg1, &unsignedp1);
3370 primother = get_narrower (other, &unsignedpo);
3372 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3373 if (unsignedp1 == unsignedpo
3374 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3375 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3377 tree type = TREE_TYPE (arg0);
3379 /* Make sure shorter operand is extended the right way
3380 to match the longer operand. */
3381 primarg1 = fold_convert (signed_or_unsigned_type_for
3382 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3384 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3385 return 1;
3388 return 0;
3391 /* See if ARG is an expression that is either a comparison or is performing
3392 arithmetic on comparisons. The comparisons must only be comparing
3393 two different values, which will be stored in *CVAL1 and *CVAL2; if
3394 they are nonzero it means that some operands have already been found.
3395 No variables may be used anywhere else in the expression except in the
3396 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3397 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3399 If this is true, return 1. Otherwise, return zero. */
3401 static int
3402 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3404 enum tree_code code = TREE_CODE (arg);
3405 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3407 /* We can handle some of the tcc_expression cases here. */
3408 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3409 tclass = tcc_unary;
3410 else if (tclass == tcc_expression
3411 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3412 || code == COMPOUND_EXPR))
3413 tclass = tcc_binary;
3415 else if (tclass == tcc_expression && code == SAVE_EXPR
3416 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3418 /* If we've already found a CVAL1 or CVAL2, this expression is
3419 too complex to handle. */
3420 if (*cval1 || *cval2)
3421 return 0;
3423 tclass = tcc_unary;
3424 *save_p = 1;
3427 switch (tclass)
3429 case tcc_unary:
3430 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3432 case tcc_binary:
3433 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3434 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3435 cval1, cval2, save_p));
3437 case tcc_constant:
3438 return 1;
3440 case tcc_expression:
3441 if (code == COND_EXPR)
3442 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3443 cval1, cval2, save_p)
3444 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3445 cval1, cval2, save_p)
3446 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3447 cval1, cval2, save_p));
3448 return 0;
3450 case tcc_comparison:
3451 /* First see if we can handle the first operand, then the second. For
3452 the second operand, we know *CVAL1 can't be zero. It must be that
3453 one side of the comparison is each of the values; test for the
3454 case where this isn't true by failing if the two operands
3455 are the same. */
3457 if (operand_equal_p (TREE_OPERAND (arg, 0),
3458 TREE_OPERAND (arg, 1), 0))
3459 return 0;
3461 if (*cval1 == 0)
3462 *cval1 = TREE_OPERAND (arg, 0);
3463 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3465 else if (*cval2 == 0)
3466 *cval2 = TREE_OPERAND (arg, 0);
3467 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3469 else
3470 return 0;
3472 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3474 else if (*cval2 == 0)
3475 *cval2 = TREE_OPERAND (arg, 1);
3476 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3478 else
3479 return 0;
3481 return 1;
3483 default:
3484 return 0;
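/* [Editor's illustration, not part of the original source.]  For
   ARG = (x < y) || (x == y), the walk above records *CVAL1 = x and
   *CVAL2 = y, since every comparison leaf mentions only those two
   values; (x < y) || (z == 1) fails because z would be a third
   value.  */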
3488 /* ARG is a tree that is known to contain just arithmetic operations and
3489 comparisons. Evaluate the operations in the tree substituting NEW0 for
3490 any occurrence of OLD0 as an operand of a comparison and likewise for
3491 NEW1 and OLD1. */
3493 static tree
3494 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3496 tree type = TREE_TYPE (arg);
3497 enum tree_code code = TREE_CODE (arg);
3498 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3500 /* We can handle some of the tcc_expression cases here. */
3501 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3502 tclass = tcc_unary;
3503 else if (tclass == tcc_expression
3504 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3505 tclass = tcc_binary;
3507 switch (tclass)
3509 case tcc_unary:
3510 return fold_build1 (code, type,
3511 eval_subst (TREE_OPERAND (arg, 0),
3512 old0, new0, old1, new1));
3514 case tcc_binary:
3515 return fold_build2 (code, type,
3516 eval_subst (TREE_OPERAND (arg, 0),
3517 old0, new0, old1, new1),
3518 eval_subst (TREE_OPERAND (arg, 1),
3519 old0, new0, old1, new1));
3521 case tcc_expression:
3522 switch (code)
3524 case SAVE_EXPR:
3525 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3527 case COMPOUND_EXPR:
3528 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3530 case COND_EXPR:
3531 return fold_build3 (code, type,
3532 eval_subst (TREE_OPERAND (arg, 0),
3533 old0, new0, old1, new1),
3534 eval_subst (TREE_OPERAND (arg, 1),
3535 old0, new0, old1, new1),
3536 eval_subst (TREE_OPERAND (arg, 2),
3537 old0, new0, old1, new1));
3538 default:
3539 break;
3541 /* Fall through - ??? */
3543 case tcc_comparison:
3545 tree arg0 = TREE_OPERAND (arg, 0);
3546 tree arg1 = TREE_OPERAND (arg, 1);
3548 /* We need to check both for exact equality and tree equality. The
3549 former will be true if the operand has a side-effect. In that
3550 case, we know the operand occurred exactly once. */
3552 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3553 arg0 = new0;
3554 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3555 arg0 = new1;
3557 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3558 arg1 = new0;
3559 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3560 arg1 = new1;
3562 return fold_build2 (code, type, arg0, arg1);
3565 default:
3566 return arg;
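/* [Editor's illustration, not part of the original source.]  With
   OLD0 = x, NEW0 = a, OLD1 = y and NEW1 = b (hypothetical operands),
   eval_subst applied to (x < y) && (x == z) rebuilds it as
   (a < b) && (a == z), refolding each node on the way back up.  */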
3570 /* Return a tree for the case when the result of an expression is RESULT
3571 converted to TYPE and OMITTED was previously an operand of the expression
3572 but is now not needed (e.g., we folded OMITTED * 0).
3574 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3575 the conversion of RESULT to TYPE. */
3577 tree
3578 omit_one_operand (tree type, tree result, tree omitted)
3580 tree t = fold_convert (type, result);
3582 /* If the resulting operand is an empty statement, just return the omitted
3583 statement cast to void. */
3584 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3585 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3587 if (TREE_SIDE_EFFECTS (omitted))
3588 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3590 return non_lvalue (t);
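/* [Editor's illustration, not part of the original source.]  If the
   caller folds f () * 0 to the constant 0 but the call has side
   effects, omit_one_operand wraps the result as the COMPOUND_EXPR
   (f (), 0), so the call is still evaluated before the constant is
   produced; without side effects it simply returns non_lvalue (0).  */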
3593 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3595 static tree
3596 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3598 tree t = fold_convert (type, result);
3600 /* If the resulting operand is an empty statement, just return the omitted
3601 statement cast to void. */
3602 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3603 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3605 if (TREE_SIDE_EFFECTS (omitted))
3606 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3608 return pedantic_non_lvalue (t);
3611 /* Return a tree for the case when the result of an expression is RESULT
3612 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3613 of the expression but are now not needed.
3615 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3616 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3617 evaluated before OMITTED2. Otherwise, if neither has side effects,
3618 just do the conversion of RESULT to TYPE. */
3620 tree
3621 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3623 tree t = fold_convert (type, result);
3625 if (TREE_SIDE_EFFECTS (omitted2))
3626 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3627 if (TREE_SIDE_EFFECTS (omitted1))
3628 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3630 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3634 /* Return a simplified tree node for the truth-negation of ARG. This
3635 never alters ARG itself. We assume that ARG is an operation that
3636 returns a truth value (0 or 1).
3638 FIXME: one would think we would fold the result, but it causes
3639 problems with the dominator optimizer. */
3641 tree
3642 fold_truth_not_expr (tree arg)
3644 tree t, type = TREE_TYPE (arg);
3645 enum tree_code code = TREE_CODE (arg);
3647 /* If this is a comparison, we can simply invert it, except for
3648 floating-point non-equality comparisons, in which case we just
3649 enclose a TRUTH_NOT_EXPR around what we have. */
3651 if (TREE_CODE_CLASS (code) == tcc_comparison)
3653 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3654 if (FLOAT_TYPE_P (op_type)
3655 && flag_trapping_math
3656 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3657 && code != NE_EXPR && code != EQ_EXPR)
3658 return NULL_TREE;
3660 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3661 if (code == ERROR_MARK)
3662 return NULL_TREE;
3664 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3665 if (EXPR_HAS_LOCATION (arg))
3666 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3667 return t;
3670 switch (code)
3672 case INTEGER_CST:
3673 return constant_boolean_node (integer_zerop (arg), type);
3675 case TRUTH_AND_EXPR:
3676 t = build2 (TRUTH_OR_EXPR, type,
3677 invert_truthvalue (TREE_OPERAND (arg, 0)),
3678 invert_truthvalue (TREE_OPERAND (arg, 1)));
3679 break;
3681 case TRUTH_OR_EXPR:
3682 t = build2 (TRUTH_AND_EXPR, type,
3683 invert_truthvalue (TREE_OPERAND (arg, 0)),
3684 invert_truthvalue (TREE_OPERAND (arg, 1)));
3685 break;
3687 case TRUTH_XOR_EXPR:
3688 /* Here we can invert either operand. We invert the first operand
3689 unless the second operand is a TRUTH_NOT_EXPR in which case our
3690 result is the XOR of the first operand with the inside of the
3691 negation of the second operand. */
3693 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3694 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3695 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3696 else
3697 t = build2 (TRUTH_XOR_EXPR, type,
3698 invert_truthvalue (TREE_OPERAND (arg, 0)),
3699 TREE_OPERAND (arg, 1));
3700 break;
3702 case TRUTH_ANDIF_EXPR:
3703 t = build2 (TRUTH_ORIF_EXPR, type,
3704 invert_truthvalue (TREE_OPERAND (arg, 0)),
3705 invert_truthvalue (TREE_OPERAND (arg, 1)));
3706 break;
3708 case TRUTH_ORIF_EXPR:
3709 t = build2 (TRUTH_ANDIF_EXPR, type,
3710 invert_truthvalue (TREE_OPERAND (arg, 0)),
3711 invert_truthvalue (TREE_OPERAND (arg, 1)));
3712 break;
3714 case TRUTH_NOT_EXPR:
3715 return TREE_OPERAND (arg, 0);
3717 case COND_EXPR:
3719 tree arg1 = TREE_OPERAND (arg, 1);
3720 tree arg2 = TREE_OPERAND (arg, 2);
3721 /* A COND_EXPR may have a throw as one operand, which
3722 then has void type. Just leave void operands
3723 as they are. */
3724 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3725 VOID_TYPE_P (TREE_TYPE (arg1))
3726 ? arg1 : invert_truthvalue (arg1),
3727 VOID_TYPE_P (TREE_TYPE (arg2))
3728 ? arg2 : invert_truthvalue (arg2));
3729 break;
3732 case COMPOUND_EXPR:
3733 t = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3734 invert_truthvalue (TREE_OPERAND (arg, 1)));
3735 break;
3737 case NON_LVALUE_EXPR:
3738 return invert_truthvalue (TREE_OPERAND (arg, 0));
3740 case NOP_EXPR:
3741 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3743 t = build1 (TRUTH_NOT_EXPR, type, arg);
3744 break;
3747 /* ... fall through ... */
3749 case CONVERT_EXPR:
3750 case FLOAT_EXPR:
3751 t = build1 (TREE_CODE (arg), type,
3752 invert_truthvalue (TREE_OPERAND (arg, 0)));
3753 break;
3755 case BIT_AND_EXPR:
3756 if (!integer_onep (TREE_OPERAND (arg, 1)))
3757 return NULL_TREE;
3758 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3759 break;
3761 case SAVE_EXPR:
3762 t = build1 (TRUTH_NOT_EXPR, type, arg);
3763 break;
3765 case CLEANUP_POINT_EXPR:
3766 t = build1 (CLEANUP_POINT_EXPR, type,
3767 invert_truthvalue (TREE_OPERAND (arg, 0)));
3768 break;
3770 default:
3771 t = NULL_TREE;
3772 break;
3775 if (t && EXPR_HAS_LOCATION (arg))
3776 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3778 return t;
3781 /* Return a simplified tree node for the truth-negation of ARG. This
3782 never alters ARG itself. We assume that ARG is an operation that
3783 returns a truth value (0 or 1).
3785 FIXME: one would think we would fold the result, but it causes
3786 problems with the dominator optimizer. */
3788 tree
3789 invert_truthvalue (tree arg)
3791 tree tem;
3793 if (TREE_CODE (arg) == ERROR_MARK)
3794 return arg;
3796 tem = fold_truth_not_expr (arg);
3797 if (!tem)
3798 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3800 return tem;
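/* [Editor's illustration, not part of the original source.]  The
   negation distributes by De Morgan's laws, so inverting
   (a && b) || c yields (!a || !b) && !c, and a comparison leaf such
   as a < b is inverted directly to a >= b when that is safe per
   invert_tree_comparison.  */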
3803 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3804 operands are another bit-wise operation with a common input. If so,
3805 distribute the bit operations to save an operation and possibly two if
3806 constants are involved. For example, convert
3807 (A | B) & (A | C) into A | (B & C)
3808 Further simplification will occur if B and C are constants.
3810 If this optimization cannot be done, 0 will be returned. */
3812 static tree
3813 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3815 tree common;
3816 tree left, right;
3818 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3819 || TREE_CODE (arg0) == code
3820 || (TREE_CODE (arg0) != BIT_AND_EXPR
3821 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3822 return 0;
3824 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3826 common = TREE_OPERAND (arg0, 0);
3827 left = TREE_OPERAND (arg0, 1);
3828 right = TREE_OPERAND (arg1, 1);
3830 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3832 common = TREE_OPERAND (arg0, 0);
3833 left = TREE_OPERAND (arg0, 1);
3834 right = TREE_OPERAND (arg1, 0);
3836 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3838 common = TREE_OPERAND (arg0, 1);
3839 left = TREE_OPERAND (arg0, 0);
3840 right = TREE_OPERAND (arg1, 1);
3842 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3844 common = TREE_OPERAND (arg0, 1);
3845 left = TREE_OPERAND (arg0, 0);
3846 right = TREE_OPERAND (arg1, 0);
3848 else
3849 return 0;
3851 common = fold_convert (type, common);
3852 left = fold_convert (type, left);
3853 right = fold_convert (type, right);
3854 return fold_build2 (TREE_CODE (arg0), type, common,
3855 fold_build2 (code, type, left, right));
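/* [Editor's illustration, not part of the original source.]  For
   (X | 4) & (X | 1) the common operand is X, so the result is
   X | (4 & 1); the inner fold gives X | 0, which in turn simplifies
   to plain X.  */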
3858 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3859 with code CODE. This optimization is unsafe. */
3860 static tree
3861 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3863 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3864 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3866 /* (A / C) +- (B / C) -> (A +- B) / C. */
3867 if (mul0 == mul1
3868 && operand_equal_p (TREE_OPERAND (arg0, 1),
3869 TREE_OPERAND (arg1, 1), 0))
3870 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3871 fold_build2 (code, type,
3872 TREE_OPERAND (arg0, 0),
3873 TREE_OPERAND (arg1, 0)),
3874 TREE_OPERAND (arg0, 1));
3876 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3877 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3878 TREE_OPERAND (arg1, 0), 0)
3879 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3880 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3882 REAL_VALUE_TYPE r0, r1;
3883 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3884 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3885 if (!mul0)
3886 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3887 if (!mul1)
3888 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3889 real_arithmetic (&r0, code, &r0, &r1);
3890 return fold_build2 (MULT_EXPR, type,
3891 TREE_OPERAND (arg0, 0),
3892 build_real (type, r0));
3895 return NULL_TREE;
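/* Editorial illustration, not part of the original source: at the C
   level, distribute_real_division rewrites

     a/c + b/c      into  (a + b) / c
     a/3.0 - a/6.0  into  a * (1/3.0 - 1/6.0)

   trading two divisions for one division or one multiplication by a
   compile-time constant.  Floating-point division does not distribute
   exactly, which is why the comment above calls this unsafe; it is
   expected to fire only when the caller permits unsafe FP math.  */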
3898 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3899 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3901 static tree
3902 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3903 HOST_WIDE_INT bitpos, int unsignedp)
3905 tree result, bftype;
3907 if (bitpos == 0)
3909 tree size = TYPE_SIZE (TREE_TYPE (inner));
3910 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3911 || POINTER_TYPE_P (TREE_TYPE (inner)))
3912 && host_integerp (size, 0)
3913 && tree_low_cst (size, 0) == bitsize)
3914 return fold_convert (type, inner);
3917 bftype = type;
3918 if (TYPE_PRECISION (bftype) != bitsize
3919 || TYPE_UNSIGNED (bftype) == !unsignedp)
3920 bftype = build_nonstandard_integer_type (bitsize, 0);
3922 result = build3 (BIT_FIELD_REF, bftype, inner,
3923 size_int (bitsize), bitsize_int (bitpos));
3925 if (bftype != type)
3926 result = fold_convert (type, result);
3928 return result;
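/* Editorial worked example: with bitsize == 8 and bitpos == 16, the
   BIT_FIELD_REF built above denotes the 8 bits of INNER starting at
   bit 16, read in a (possibly nonstandard) 8-bit integer type and
   then converted to TYPE.  The bitpos == 0 early return avoids a
   redundant BIT_FIELD_REF when the whole of INNER is being read.  */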
3931 /* Optimize a bit-field compare.
3933 There are two cases: the first is a compare against a constant and the
3934 second is a comparison of two items where the fields are at the same
3935 bit position relative to the start of a chunk (byte, halfword, word)
3936 large enough to contain it. In these cases we can avoid the shift
3937 implicit in bitfield extractions.
3939 For constants, we emit a compare of the shifted constant with the
3940 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3941 compared. For two fields at the same position, we AND each operand
3942 with the same mask and compare the results of the ANDs.
3944 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3945 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3946 are the left and right operands of the comparison, respectively.
3948 If the optimization described above can be done, we return the resulting
3949 tree. Otherwise we return zero. */
3951 static tree
3952 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3953 tree lhs, tree rhs)
3955 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3956 tree type = TREE_TYPE (lhs);
3957 tree signed_type, unsigned_type;
3958 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3959 enum machine_mode lmode, rmode, nmode;
3960 int lunsignedp, runsignedp;
3961 int lvolatilep = 0, rvolatilep = 0;
3962 tree linner, rinner = NULL_TREE;
3963 tree mask;
3964 tree offset;
3966 /* Get all the information about the extractions being done. If the bit size
3967 is the same as the size of the underlying object, we aren't doing an
3968 extraction at all and so can do nothing. We also don't want to
3969 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3970 then will no longer be able to replace it. */
3971 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3972 &lunsignedp, &lvolatilep, false);
3973 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3974 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3975 return 0;
3977 if (!const_p)
3979 /* If this is not a constant, we can only do something if bit positions,
3980 sizes, and signedness are the same. */
3981 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3982 &runsignedp, &rvolatilep, false);
3984 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3985 || lunsignedp != runsignedp || offset != 0
3986 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3987 return 0;
3990 /* See if we can find a mode to refer to this field. We should be able to,
3991 but fail if we can't. */
3992 nmode = get_best_mode (lbitsize, lbitpos,
3993 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3994 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3995 TYPE_ALIGN (TREE_TYPE (rinner))),
3996 word_mode, lvolatilep || rvolatilep);
3997 if (nmode == VOIDmode)
3998 return 0;
4000 /* Set signed and unsigned types of the precision of this mode for the
4001 shifts below. */
4002 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4003 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4005 /* Compute the bit position and size for the new reference and our offset
4006 within it. If the new reference is the same size as the original, we
4007 won't optimize anything, so return zero. */
4008 nbitsize = GET_MODE_BITSIZE (nmode);
4009 nbitpos = lbitpos & ~ (nbitsize - 1);
4010 lbitpos -= nbitpos;
4011 if (nbitsize == lbitsize)
4012 return 0;
4014 if (BYTES_BIG_ENDIAN)
4015 lbitpos = nbitsize - lbitsize - lbitpos;
4017 /* Make the mask to be used against the extracted field. */
4018 mask = build_int_cst_type (unsigned_type, -1);
4019 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4020 mask = const_binop (RSHIFT_EXPR, mask,
4021 size_int (nbitsize - lbitsize - lbitpos), 0);
4023 if (! const_p)
4024 /* If not comparing with constant, just rework the comparison
4025 and return. */
4026 return fold_build2 (code, compare_type,
4027 fold_build2 (BIT_AND_EXPR, unsigned_type,
4028 make_bit_field_ref (linner,
4029 unsigned_type,
4030 nbitsize, nbitpos,
4032 mask),
4033 fold_build2 (BIT_AND_EXPR, unsigned_type,
4034 make_bit_field_ref (rinner,
4035 unsigned_type,
4036 nbitsize, nbitpos,
4038 mask));
4040 /* Otherwise, we are handling the constant case. See if the constant is too
4041 big for the field. Warn and return a tree for 0 (false) if so. We do
4042 this not only for its own sake, but to avoid having to test for this
4043 error case below. If we didn't, we might generate wrong code.
4045 For unsigned fields, the constant shifted right by the field length should
4046 be all zero. For signed fields, the high-order bits should agree with
4047 the sign bit. */
4049 if (lunsignedp)
4051 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4052 fold_convert (unsigned_type, rhs),
4053 size_int (lbitsize), 0)))
4055 warning (0, "comparison is always %d due to width of bit-field",
4056 code == NE_EXPR);
4057 return constant_boolean_node (code == NE_EXPR, compare_type);
4060 else
4062 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4063 size_int (lbitsize - 1), 0);
4064 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4066 warning (0, "comparison is always %d due to width of bit-field",
4067 code == NE_EXPR);
4068 return constant_boolean_node (code == NE_EXPR, compare_type);
4072 /* Single-bit compares should always be against zero. */
4073 if (lbitsize == 1 && ! integer_zerop (rhs))
4075 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4076 rhs = build_int_cst (type, 0);
4079 /* Make a new bitfield reference, shift the constant over the
4080 appropriate number of bits and mask it with the computed mask
4081 (in case this was a signed field). If we changed it, make a new one. */
4082 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4083 if (lvolatilep)
4085 TREE_SIDE_EFFECTS (lhs) = 1;
4086 TREE_THIS_VOLATILE (lhs) = 1;
4089 rhs = const_binop (BIT_AND_EXPR,
4090 const_binop (LSHIFT_EXPR,
4091 fold_convert (unsigned_type, rhs),
4092 size_int (lbitpos), 0),
4093 mask, 0);
4095 return build2 (code, compare_type,
4096 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4097 rhs);
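/* Editorial illustration (hypothetical little-endian layout with a
   32-bit word): given

     struct s { unsigned a : 4; unsigned b : 3; } x;

   a naive "x.b == 5" would load the word, shift right by 4 and mask
   with 7 before comparing.  The function above instead pre-shifts
   the constant and masks the containing word directly, roughly

     (word & (7 << 4)) == (5 << 4)

   so the extraction shift disappears.  */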
4100 /* Subroutine for fold_truthop: decode a field reference.
4102 If EXP is a comparison reference, we return the innermost reference.
4104 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4105 set to the starting bit number.
4107 If the innermost field can be completely contained in a mode-sized
4108 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4110 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4111 otherwise it is not changed.
4113 *PUNSIGNEDP is set to the signedness of the field.
4115 *PMASK is set to the mask used. This is either contained in a
4116 BIT_AND_EXPR or derived from the width of the field.
4118 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4120 Return 0 if this is not a component reference or is one that we can't
4121 do anything with. */
4123 static tree
4124 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4125 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4126 int *punsignedp, int *pvolatilep,
4127 tree *pmask, tree *pand_mask)
4129 tree outer_type = 0;
4130 tree and_mask = 0;
4131 tree mask, inner, offset;
4132 tree unsigned_type;
4133 unsigned int precision;
4135 /* All the optimizations using this function assume integer fields.
4136 There are problems with FP fields since the type_for_size call
4137 below can fail for, e.g., XFmode. */
4138 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4139 return 0;
4141 /* We are interested in the bare arrangement of bits, so strip everything
4142 that doesn't affect the machine mode. However, record the type of the
4143 outermost expression if it may matter below. */
4144 if (CONVERT_EXPR_P (exp)
4145 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4146 outer_type = TREE_TYPE (exp);
4147 STRIP_NOPS (exp);
4149 if (TREE_CODE (exp) == BIT_AND_EXPR)
4151 and_mask = TREE_OPERAND (exp, 1);
4152 exp = TREE_OPERAND (exp, 0);
4153 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4154 if (TREE_CODE (and_mask) != INTEGER_CST)
4155 return 0;
4158 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4159 punsignedp, pvolatilep, false);
4160 if ((inner == exp && and_mask == 0)
4161 || *pbitsize < 0 || offset != 0
4162 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4163 return 0;
4165 /* If the number of bits in the reference is the same as the bitsize of
4166 the outer type, then the outer type gives the signedness. Otherwise
4167 (in case of a small bitfield) the signedness is unchanged. */
4168 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4169 *punsignedp = TYPE_UNSIGNED (outer_type);
4171 /* Compute the mask to access the bitfield. */
4172 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4173 precision = TYPE_PRECISION (unsigned_type);
4175 mask = build_int_cst_type (unsigned_type, -1);
4177 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4178 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4180 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4181 if (and_mask != 0)
4182 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4183 fold_convert (unsigned_type, and_mask), mask);
4185 *pmask = mask;
4186 *pand_mask = and_mask;
4187 return inner;
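/* Editorial worked example: suppose EXP is "f & 0x15" for a 5-bit
   field F, and assume for illustration that type_for_size hands back
   an 8-bit unsigned_type.  The shift pair above builds

     mask = (0xff << (8 - 5)) >> (8 - 5)  ==  0x1f

   and merging gives *PMASK == 0x1f & 0x15 == 0x15, with *PAND_MASK
   recording the original 0x15.  */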
4190 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4191 bit positions. */
4193 static int
4194 all_ones_mask_p (const_tree mask, int size)
4196 tree type = TREE_TYPE (mask);
4197 unsigned int precision = TYPE_PRECISION (type);
4198 tree tmask;
4200 tmask = build_int_cst_type (signed_type_for (type), -1);
4202 return
4203 tree_int_cst_equal (mask,
4204 const_binop (RSHIFT_EXPR,
4205 const_binop (LSHIFT_EXPR, tmask,
4206 size_int (precision - size),
4208 size_int (precision - size), 0));
4211 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4212 represents the sign bit of EXP's type. If EXP represents a sign
4213 or zero extension, also test VAL against the unextended type.
4214 The return value is the (sub)expression whose sign bit is VAL,
4215 or NULL_TREE otherwise. */
4217 static tree
4218 sign_bit_p (tree exp, const_tree val)
4220 unsigned HOST_WIDE_INT mask_lo, lo;
4221 HOST_WIDE_INT mask_hi, hi;
4222 int width;
4223 tree t;
4225 /* Tree EXP must have an integral type. */
4226 t = TREE_TYPE (exp);
4227 if (! INTEGRAL_TYPE_P (t))
4228 return NULL_TREE;
4230 /* Tree VAL must be an integer constant. */
4231 if (TREE_CODE (val) != INTEGER_CST
4232 || TREE_OVERFLOW (val))
4233 return NULL_TREE;
4235 width = TYPE_PRECISION (t);
4236 if (width > HOST_BITS_PER_WIDE_INT)
4238 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4239 lo = 0;
4241 mask_hi = ((unsigned HOST_WIDE_INT) -1
4242 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4243 mask_lo = -1;
4245 else
4247 hi = 0;
4248 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4250 mask_hi = 0;
4251 mask_lo = ((unsigned HOST_WIDE_INT) -1
4252 >> (HOST_BITS_PER_WIDE_INT - width));
4255 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4256 treat VAL as if it were unsigned. */
4257 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4258 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4259 return exp;
4261 /* Handle extension from a narrower type. */
4262 if (TREE_CODE (exp) == NOP_EXPR
4263 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4264 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4266 return NULL_TREE;
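/* Editorial worked example: for a 32-bit int on a host whose
   HOST_WIDE_INT has 64 bits, width == 32 takes the second branch, so

     lo      == (unsigned HOST_WIDE_INT) 1 << 31  ==  0x80000000
     mask_lo == (unsigned HOST_WIDE_INT) -1 >> 32 ==  0xffffffff

   and VAL matches exactly when its low 32 bits are 0x80000000, e.g.
   the INTEGER_CST for (int) 0x80000000.  */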
4269 /* Subroutine for fold_truthop: determine if an operand is simple enough
4270 to be evaluated unconditionally. */
4272 static int
4273 simple_operand_p (const_tree exp)
4275 /* Strip any conversions that don't change the machine mode. */
4276 STRIP_NOPS (exp);
4278 return (CONSTANT_CLASS_P (exp)
4279 || TREE_CODE (exp) == SSA_NAME
4280 || (DECL_P (exp)
4281 && ! TREE_ADDRESSABLE (exp)
4282 && ! TREE_THIS_VOLATILE (exp)
4283 && ! DECL_NONLOCAL (exp)
4284 /* Don't regard global variables as simple. They may be
4285 allocated in ways unknown to the compiler (shared memory,
4286 #pragma weak, etc). */
4287 && ! TREE_PUBLIC (exp)
4288 && ! DECL_EXTERNAL (exp)
4289 /* Loading a static variable is unduly expensive, but global
4290 registers aren't expensive. */
4291 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4294 /* The following functions are subroutines to fold_range_test and allow it to
4295 try to change a logical combination of comparisons into a range test.
4297 For example, both
4298 X == 2 || X == 3 || X == 4 || X == 5
4300 X >= 2 && X <= 5
4301 are converted to
4302 (unsigned) (X - 2) <= 3
4304 We describe each set of comparisons as being either inside or outside
4305 a range, using a variable named like IN_P, and then describe the
4306 range with a lower and upper bound. If one of the bounds is omitted,
4307 it represents either the highest or lowest value of the type.
4309 In the comments below, we represent a range by two numbers in brackets
4310 preceded by a "+" to designate being inside that range, or a "-" to
4311 designate being outside that range, so the condition can be inverted by
4312 flipping the prefix. An omitted bound is represented by a "-". For
4313 example, "- [-, 10]" means being outside the range starting at the lowest
4314 possible value and ending at 10, in other words, being greater than 10.
4315 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4316 always false.
4318 We set things up so that the missing bounds are handled in a
4319 consistent manner, so that neither a missing bound nor "true" and
4320 "false" needs to be handled as a special case. */
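/* Editorial illustration, not part of the original source: the
   source-level payoff of this machinery.  For an int X, both of

     X == 2 || X == 3 || X == 4 || X == 5
     X >= 2 && X <= 5

   fold to the single unsigned comparison

     (unsigned int) (X - 2) <= 3

   because subtracting 2 maps [2, 5] onto [0, 3] and every value
   outside the range wraps around to something larger than 3.  */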
4322 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4323 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4324 and UPPER1_P are nonzero if the respective argument is an upper bound
4325 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4326 must be specified for a comparison. ARG1 will be converted to ARG0's
4327 type if both are specified. */
4329 static tree
4330 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4331 tree arg1, int upper1_p)
4333 tree tem;
4334 int result;
4335 int sgn0, sgn1;
4337 /* If neither arg represents infinity, do the normal operation.
4338 Else, if not a comparison, return infinity. Else handle the special
4339 comparison rules. Note that most of the cases below won't occur, but
4340 are handled for consistency. */
4342 if (arg0 != 0 && arg1 != 0)
4344 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4345 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4346 STRIP_NOPS (tem);
4347 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4350 if (TREE_CODE_CLASS (code) != tcc_comparison)
4351 return 0;
4353 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4354 for neither. In real maths, we cannot assume open ended ranges are
4355 the same. But, this is computer arithmetic, where numbers are finite.
4356 We can therefore make the transformation of any unbounded range with
4357 the value Z, Z being greater than any representable number. This permits
4358 us to treat unbounded ranges as equal. */
4359 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4360 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4361 switch (code)
4363 case EQ_EXPR:
4364 result = sgn0 == sgn1;
4365 break;
4366 case NE_EXPR:
4367 result = sgn0 != sgn1;
4368 break;
4369 case LT_EXPR:
4370 result = sgn0 < sgn1;
4371 break;
4372 case LE_EXPR:
4373 result = sgn0 <= sgn1;
4374 break;
4375 case GT_EXPR:
4376 result = sgn0 > sgn1;
4377 break;
4378 case GE_EXPR:
4379 result = sgn0 >= sgn1;
4380 break;
4381 default:
4382 gcc_unreachable ();
4385 return constant_boolean_node (result, type);
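/* Editorial worked example: comparing an absent upper bound against a
   constant.  Calling range_binop (LT_EXPR, type, 0, 1, five, 0),
   where ARG0 == 0 with UPPER0_P == 1 stands for plus infinity, gives
   sgn0 == 1 and sgn1 == 0, so the result is the "false" node: an
   unbounded upper end is never less than a finite bound.  */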
4388 /* Given EXP, a logical expression, set the range it is testing into
4389 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4390 actually being tested. *PLOW and *PHIGH will be made of the same
4391 type as the returned expression. If EXP is not a comparison, we
4392 will most likely not be returning a useful value and range. Set
4393 *STRICT_OVERFLOW_P to true if the return value is only valid
4394 because signed overflow is undefined; otherwise, do not change
4395 *STRICT_OVERFLOW_P. */
4397 static tree
4398 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4399 bool *strict_overflow_p)
4401 enum tree_code code;
4402 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4403 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4404 int in_p, n_in_p;
4405 tree low, high, n_low, n_high;
4407 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4408 and see if we can refine the range. Some of the cases below may not
4409 happen, but it doesn't seem worth worrying about this. We "continue"
4410 the outer loop when we've changed something; otherwise we "break"
4411 the switch, which will "break" the while. */
4413 in_p = 0;
4414 low = high = build_int_cst (TREE_TYPE (exp), 0);
4416 while (1)
4418 code = TREE_CODE (exp);
4419 exp_type = TREE_TYPE (exp);
4421 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4423 if (TREE_OPERAND_LENGTH (exp) > 0)
4424 arg0 = TREE_OPERAND (exp, 0);
4425 if (TREE_CODE_CLASS (code) == tcc_comparison
4426 || TREE_CODE_CLASS (code) == tcc_unary
4427 || TREE_CODE_CLASS (code) == tcc_binary)
4428 arg0_type = TREE_TYPE (arg0);
4429 if (TREE_CODE_CLASS (code) == tcc_binary
4430 || TREE_CODE_CLASS (code) == tcc_comparison
4431 || (TREE_CODE_CLASS (code) == tcc_expression
4432 && TREE_OPERAND_LENGTH (exp) > 1))
4433 arg1 = TREE_OPERAND (exp, 1);
4436 switch (code)
4438 case TRUTH_NOT_EXPR:
4439 in_p = ! in_p, exp = arg0;
4440 continue;
4442 case EQ_EXPR: case NE_EXPR:
4443 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4444 /* We can only do something if the range is testing for zero
4445 and if the second operand is an integer constant. Note that
4446 saying something is "in" the range we make is done by
4447 complementing IN_P, since it is set in the initial case of
4448 being not equal to zero; "out" means leaving it alone. */
4449 if (low == 0 || high == 0
4450 || ! integer_zerop (low) || ! integer_zerop (high)
4451 || TREE_CODE (arg1) != INTEGER_CST)
4452 break;
4454 switch (code)
4456 case NE_EXPR: /* - [c, c] */
4457 low = high = arg1;
4458 break;
4459 case EQ_EXPR: /* + [c, c] */
4460 in_p = ! in_p, low = high = arg1;
4461 break;
4462 case GT_EXPR: /* - [-, c] */
4463 low = 0, high = arg1;
4464 break;
4465 case GE_EXPR: /* + [c, -] */
4466 in_p = ! in_p, low = arg1, high = 0;
4467 break;
4468 case LT_EXPR: /* - [c, -] */
4469 low = arg1, high = 0;
4470 break;
4471 case LE_EXPR: /* + [-, c] */
4472 in_p = ! in_p, low = 0, high = arg1;
4473 break;
4474 default:
4475 gcc_unreachable ();
4478 /* If this is an unsigned comparison, we also know that EXP is
4479 greater than or equal to zero. We base the range tests we make
4480 on that fact, so we record it here so we can parse existing
4481 range tests. We test arg0_type since often the return type
4482 of, e.g. EQ_EXPR, is boolean. */
4483 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4485 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4486 in_p, low, high, 1,
4487 build_int_cst (arg0_type, 0),
4488 NULL_TREE))
4489 break;
4491 in_p = n_in_p, low = n_low, high = n_high;
4493 /* If the high bound is missing, but we have a nonzero low
4494 bound, reverse the range so it goes from zero to the low bound
4495 minus 1. */
4496 if (high == 0 && low && ! integer_zerop (low))
4498 in_p = ! in_p;
4499 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4500 integer_one_node, 0);
4501 low = build_int_cst (arg0_type, 0);
4505 exp = arg0;
4506 continue;
4508 case NEGATE_EXPR:
4509 /* (-x) IN [a,b] -> x in [-b, -a] */
4510 n_low = range_binop (MINUS_EXPR, exp_type,
4511 build_int_cst (exp_type, 0),
4512 0, high, 1);
4513 n_high = range_binop (MINUS_EXPR, exp_type,
4514 build_int_cst (exp_type, 0),
4515 0, low, 0);
4516 low = n_low, high = n_high;
4517 exp = arg0;
4518 continue;
4520 case BIT_NOT_EXPR:
4521 /* ~ X -> -X - 1 */
4522 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4523 build_int_cst (exp_type, 1));
4524 continue;
4526 case PLUS_EXPR: case MINUS_EXPR:
4527 if (TREE_CODE (arg1) != INTEGER_CST)
4528 break;
4530 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4531 move a constant to the other side. */
4532 if (!TYPE_UNSIGNED (arg0_type)
4533 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4534 break;
4536 /* If EXP is signed, any overflow in the computation is undefined,
4537 so we don't worry about it so long as our computations on
4538 the bounds don't overflow. For unsigned, overflow is defined
4539 and this is exactly the right thing. */
4540 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4541 arg0_type, low, 0, arg1, 0);
4542 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4543 arg0_type, high, 1, arg1, 0);
4544 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4545 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4546 break;
4548 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4549 *strict_overflow_p = true;
4551 /* Check for an unsigned range which has wrapped around the maximum
4552 value thus making n_high < n_low, and normalize it. */
4553 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4555 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4556 integer_one_node, 0);
4557 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4558 integer_one_node, 0);
4560 /* If the range is of the form +/- [ x+1, x ], we won't
4561 be able to normalize it. But then, it represents the
4562 whole range or the empty set, so make it
4563 +/- [ -, - ]. */
4564 if (tree_int_cst_equal (n_low, low)
4565 && tree_int_cst_equal (n_high, high))
4566 low = high = 0;
4567 else
4568 in_p = ! in_p;
4570 else
4571 low = n_low, high = n_high;
4573 exp = arg0;
4574 continue;
4576 CASE_CONVERT: case NON_LVALUE_EXPR:
4577 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4578 break;
4580 if (! INTEGRAL_TYPE_P (arg0_type)
4581 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4582 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4583 break;
4585 n_low = low, n_high = high;
4587 if (n_low != 0)
4588 n_low = fold_convert (arg0_type, n_low);
4590 if (n_high != 0)
4591 n_high = fold_convert (arg0_type, n_high);
4594 /* If we're converting arg0 from an unsigned type to exp's
4595 signed type, we will be doing the comparison as unsigned.
4596 The tests above have already verified that LOW and HIGH
4597 are both positive.
4599 So we have to ensure that we will handle large unsigned
4600 values the same way that the current signed bounds treat
4601 negative values. */
4603 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4605 tree high_positive;
4606 tree equiv_type;
4607 /* For fixed-point modes, we need to pass the saturating flag
4608 as the 2nd parameter. */
4609 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4610 equiv_type = lang_hooks.types.type_for_mode
4611 (TYPE_MODE (arg0_type),
4612 TYPE_SATURATING (arg0_type));
4613 else
4614 equiv_type = lang_hooks.types.type_for_mode
4615 (TYPE_MODE (arg0_type), 1);
4617 /* A range without an upper bound is, naturally, unbounded.
4618 Since convert would have cropped a very large value, use
4619 the max value for the destination type. */
4620 high_positive
4621 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4622 : TYPE_MAX_VALUE (arg0_type);
4624 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4625 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4626 fold_convert (arg0_type,
4627 high_positive),
4628 build_int_cst (arg0_type, 1));
4630 /* If the low bound is specified, "and" the range with the
4631 range for which the original unsigned value will be
4632 positive. */
4633 if (low != 0)
4635 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4636 1, n_low, n_high, 1,
4637 fold_convert (arg0_type,
4638 integer_zero_node),
4639 high_positive))
4640 break;
4642 in_p = (n_in_p == in_p);
4644 else
4646 /* Otherwise, "or" the range with the range of the input
4647 that will be interpreted as negative. */
4648 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4649 0, n_low, n_high, 1,
4650 fold_convert (arg0_type,
4651 integer_zero_node),
4652 high_positive))
4653 break;
4655 in_p = (in_p != n_in_p);
4659 exp = arg0;
4660 low = n_low, high = n_high;
4661 continue;
4663 default:
4664 break;
4667 break;
4670 /* If EXP is a constant, we can evaluate whether this is true or false. */
4671 if (TREE_CODE (exp) == INTEGER_CST)
4673 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4674 exp, 0, low, 0))
4675 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4676 exp, 1, high, 1)));
4677 low = high = 0;
4678 exp = 0;
4681 *pin_p = in_p, *plow = low, *phigh = high;
4682 return exp;
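/* Editorial worked example: for EXP == "x > 10" with a signed int X,
   the loop above starts from the trivial range - [0, 0] ("x != 0"),
   sees GT_EXPR with the INTEGER_CST 10, and rewrites the test as the
   range - [-, 10]: being outside [-INF, 10] is exactly x > 10.  It
   returns X with *PIN_P == 0, *PLOW == 0 (no lower bound) and
   *PHIGH == 10.  */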
4685 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4686 type, TYPE, return an expression to test if EXP is in (or out of, depending
4687 on IN_P) the range. Return 0 if the test couldn't be created. */
4689 static tree
4690 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4692 tree etype = TREE_TYPE (exp), value;
4693 enum tree_code code;
4695 #ifdef HAVE_canonicalize_funcptr_for_compare
4696 /* Disable this optimization for function pointer expressions
4697 on targets that require function pointer canonicalization. */
4698 if (HAVE_canonicalize_funcptr_for_compare
4699 && TREE_CODE (etype) == POINTER_TYPE
4700 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4701 return NULL_TREE;
4702 #endif
4704 if (! in_p)
4706 value = build_range_check (type, exp, 1, low, high);
4707 if (value != 0)
4708 return invert_truthvalue (value);
4710 return 0;
4713 if (low == 0 && high == 0)
4714 return build_int_cst (type, 1);
4716 if (low == 0)
4717 return fold_build2 (LE_EXPR, type, exp,
4718 fold_convert (etype, high));
4720 if (high == 0)
4721 return fold_build2 (GE_EXPR, type, exp,
4722 fold_convert (etype, low));
4724 if (operand_equal_p (low, high, 0))
4725 return fold_build2 (EQ_EXPR, type, exp,
4726 fold_convert (etype, low));
4728 if (integer_zerop (low))
4730 if (! TYPE_UNSIGNED (etype))
4732 etype = unsigned_type_for (etype);
4733 high = fold_convert (etype, high);
4734 exp = fold_convert (etype, exp);
4736 return build_range_check (type, exp, 1, 0, high);
4739 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4740 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4742 unsigned HOST_WIDE_INT lo;
4743 HOST_WIDE_INT hi;
4744 int prec;
4746 prec = TYPE_PRECISION (etype);
4747 if (prec <= HOST_BITS_PER_WIDE_INT)
4749 hi = 0;
4750 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4752 else
4754 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4755 lo = (unsigned HOST_WIDE_INT) -1;
4758 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4760 if (TYPE_UNSIGNED (etype))
4762 tree signed_etype = signed_type_for (etype);
4763 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4764 etype
4765 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4766 else
4767 etype = signed_etype;
4768 exp = fold_convert (etype, exp);
4770 return fold_build2 (GT_EXPR, type, exp,
4771 build_int_cst (etype, 0));
4775 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4776 This requires wrap-around arithmetic for the type of the expression. */
4777 code = TREE_CODE (etype);
4778 switch (code)
4780 case INTEGER_TYPE:
4781 case ENUMERAL_TYPE:
4782 case BOOLEAN_TYPE:
4783 /* There is no requirement that LOW be within the range of ETYPE
4784 if the latter is a subtype. It must, however, be within the base
4785 type of ETYPE. So be sure we do the subtraction in that type. */
4786 if (code == INTEGER_TYPE && TREE_TYPE (etype))
4788 etype = TREE_TYPE (etype);
4789 /* But not in an enumeral or boolean type. */
4790 code = TREE_CODE (etype);
4793 if (code != INTEGER_TYPE)
4794 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4795 TYPE_UNSIGNED (etype));
4796 break;
4798 default:
4799 break;
4802 /* If we don't have wrap-around arithmetic up front, try to force it. */
4803 if (TREE_CODE (etype) == INTEGER_TYPE
4804 && !TYPE_OVERFLOW_WRAPS (etype))
4806 tree utype, minv, maxv;
4808 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4809 for the type in question, as we rely on this here. */
4810 utype = unsigned_type_for (etype);
4811 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4812 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4813 integer_one_node, 1);
4814 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4816 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4817 minv, 1, maxv, 1)))
4818 etype = utype;
4819 else
4820 return 0;
4823 high = fold_convert (etype, high);
4824 low = fold_convert (etype, low);
4825 exp = fold_convert (etype, exp);
4827 value = const_binop (MINUS_EXPR, high, low, 0);
4830 if (POINTER_TYPE_P (etype))
4832 if (value != 0 && !TREE_OVERFLOW (value))
4834 low = fold_convert (sizetype, low);
4835 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4836 return build_range_check (type,
4837 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4838 1, build_int_cst (etype, 0), value);
4840 return 0;
4843 if (value != 0 && !TREE_OVERFLOW (value))
4844 return build_range_check (type,
4845 fold_build2 (MINUS_EXPR, etype, exp, low),
4846 1, build_int_cst (etype, 0), value);
4848 return 0;
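/* Editorial illustration, not part of the original source: two
   hypothetical results of build_range_check.

     + [1, 127] on an unsigned char c
       ->  (signed char) c > 0

     + [low, high] on an int x, wrap-around arithmetic available
       ->  (unsigned int) (x - low) <= (unsigned int) (high - low)

   The second is the generic rewrite; doing the subtraction in an
   unsigned type makes out-of-range values wrap to large numbers that
   fail the comparison instead of overflowing.  */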
4851 /* Return the predecessor of VAL in its type, handling the infinite case. */
4853 static tree
4854 range_predecessor (tree val)
4856 tree type = TREE_TYPE (val);
4858 if (INTEGRAL_TYPE_P (type)
4859 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4860 return 0;
4861 else
4862 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4865 /* Return the successor of VAL in its type, handling the infinite case. */
4867 static tree
4868 range_successor (tree val)
4870 tree type = TREE_TYPE (val);
4872 if (INTEGRAL_TYPE_P (type)
4873 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4874 return 0;
4875 else
4876 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4879 /* Given two ranges, see if we can merge them into one. Return 1 if we
4880 can, 0 if we can't. Set the output range into the specified parameters. */
4882 static int
4883 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4884 tree high0, int in1_p, tree low1, tree high1)
4886 int no_overlap;
4887 int subset;
4888 int temp;
4889 tree tem;
4890 int in_p;
4891 tree low, high;
4892 int lowequal = ((low0 == 0 && low1 == 0)
4893 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4894 low0, 0, low1, 0)));
4895 int highequal = ((high0 == 0 && high1 == 0)
4896 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4897 high0, 1, high1, 1)));
4899 /* Make range 0 be the range that starts first, or ends last if they
4900 start at the same value. Swap them if it isn't. */
4901 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4902 low0, 0, low1, 0))
4903 || (lowequal
4904 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4905 high1, 1, high0, 1))))
4907 temp = in0_p, in0_p = in1_p, in1_p = temp;
4908 tem = low0, low0 = low1, low1 = tem;
4909 tem = high0, high0 = high1, high1 = tem;
4912 /* Now flag two cases, whether the ranges are disjoint or whether the
4913 second range is totally subsumed in the first. Note that the tests
4914 below are simplified by the ones above. */
4915 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4916 high0, 1, low1, 0));
4917 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4918 high1, 1, high0, 1));
4920 /* We now have four cases, depending on whether we are including or
4921 excluding the two ranges. */
4922 if (in0_p && in1_p)
4924 /* If they don't overlap, the result is false. If the second range
4925 is a subset it is the result. Otherwise, the range is from the start
4926 of the second to the end of the first. */
4927 if (no_overlap)
4928 in_p = 0, low = high = 0;
4929 else if (subset)
4930 in_p = 1, low = low1, high = high1;
4931 else
4932 in_p = 1, low = low1, high = high0;
4935 else if (in0_p && ! in1_p)
4937 /* If they don't overlap, the result is the first range. If they are
4938 equal, the result is false. If the second range is a subset of the
4939 first, and the ranges begin at the same place, we go from just after
4940 the end of the second range to the end of the first. If the second
4941 range is not a subset of the first, or if it is a subset and both
4942 ranges end at the same place, the range starts at the start of the
4943 first range and ends just before the second range.
4944 Otherwise, we can't describe this as a single range. */
4945 if (no_overlap)
4946 in_p = 1, low = low0, high = high0;
4947 else if (lowequal && highequal)
4948 in_p = 0, low = high = 0;
4949 else if (subset && lowequal)
4951 low = range_successor (high1);
4952 high = high0;
4953 in_p = 1;
4954 if (low == 0)
4956 /* We are in the weird situation where high0 > high1 but
4957 high1 has no successor. Punt. */
4958 return 0;
4961 else if (! subset || highequal)
4963 low = low0;
4964 high = range_predecessor (low1);
4965 in_p = 1;
4966 if (high == 0)
4968 /* low0 < low1 but low1 has no predecessor. Punt. */
4969 return 0;
4972 else
4973 return 0;
4976 else if (! in0_p && in1_p)
4978 /* If they don't overlap, the result is the second range. If the second
4979 is a subset of the first, the result is false. Otherwise,
4980 the range starts just after the first range and ends at the
4981 end of the second. */
4982 if (no_overlap)
4983 in_p = 1, low = low1, high = high1;
4984 else if (subset || highequal)
4985 in_p = 0, low = high = 0;
4986 else
4988 low = range_successor (high0);
4989 high = high1;
4990 in_p = 1;
4991 if (low == 0)
4993 /* high1 > high0 but high0 has no successor. Punt. */
4994 return 0;
4999 else
5001 /* The case where we are excluding both ranges. Here the complex case
5002 is if they don't overlap. In that case, the only time we have a
5003 range is if they are adjacent. If the second is a subset of the
5004 first, the result is the first. Otherwise, the range to exclude
5005 starts at the beginning of the first range and ends at the end of the
5006 second. */
5007 if (no_overlap)
5009 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5010 range_successor (high0),
5011 1, low1, 0)))
5012 in_p = 0, low = low0, high = high1;
5013 else
5015 /* Canonicalize - [min, x] into - [-, x]. */
5016 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5017 switch (TREE_CODE (TREE_TYPE (low0)))
5019 case ENUMERAL_TYPE:
5020 if (TYPE_PRECISION (TREE_TYPE (low0))
5021 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5022 break;
5023 /* FALLTHROUGH */
5024 case INTEGER_TYPE:
5025 if (tree_int_cst_equal (low0,
5026 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5027 low0 = 0;
5028 break;
5029 case POINTER_TYPE:
5030 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5031 && integer_zerop (low0))
5032 low0 = 0;
5033 break;
5034 default:
5035 break;
5038 /* Canonicalize - [x, max] into - [x, -]. */
5039 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5040 switch (TREE_CODE (TREE_TYPE (high1)))
5042 case ENUMERAL_TYPE:
5043 if (TYPE_PRECISION (TREE_TYPE (high1))
5044 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5045 break;
5046 /* FALLTHROUGH */
5047 case INTEGER_TYPE:
5048 if (tree_int_cst_equal (high1,
5049 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5050 high1 = 0;
5051 break;
5052 case POINTER_TYPE:
5053 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5054 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5055 high1, 1,
5056 integer_one_node, 1)))
5057 high1 = 0;
5058 break;
5059 default:
5060 break;
5063 /* The ranges might also be adjacent between the maximum and
5064 minimum values of the given type. For
5065 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5066 return + [x + 1, y - 1]. */
5067 if (low0 == 0 && high1 == 0)
5069 low = range_successor (high0);
5070 high = range_predecessor (low1);
5071 if (low == 0 || high == 0)
5072 return 0;
5074 in_p = 1;
5076 else
5077 return 0;
5080 else if (subset)
5081 in_p = 0, low = low0, high = high0;
5082 else
5083 in_p = 0, low = low0, high = high1;
5086 *pin_p = in_p, *plow = low, *phigh = high;
5087 return 1;
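/* Editorial worked example: merging + [2, 10] with + [5, 20] (both
   IN_P).  Range 0 already starts first, the ranges overlap, and the
   second is not a subset, so the result is the intersection
   + [5, 10]: the low bound comes from the second range, the high
   bound from the first.  With + [2, 4] and + [5, 20] instead,
   NO_OVERLAP holds and the result is the always-false range.  */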
5091 /* Subroutine of fold, looking inside expressions of the form
5092 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5093 of the COND_EXPR. This function is being used also to optimize
5094 A op B ? C : A, by reversing the comparison first.
5096 Return a folded expression whose code is not a COND_EXPR
5097 anymore, or NULL_TREE if no folding opportunity is found. */
5099 static tree
5100 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5102 enum tree_code comp_code = TREE_CODE (arg0);
5103 tree arg00 = TREE_OPERAND (arg0, 0);
5104 tree arg01 = TREE_OPERAND (arg0, 1);
5105 tree arg1_type = TREE_TYPE (arg1);
5106 tree tem;
5108 STRIP_NOPS (arg1);
5109 STRIP_NOPS (arg2);
5111 /* If we have A op 0 ? A : -A, consider applying the following
5112 transformations:
5114 A == 0? A : -A same as -A
5115 A != 0? A : -A same as A
5116 A >= 0? A : -A same as abs (A)
5117 A > 0? A : -A same as abs (A)
5118 A <= 0? A : -A same as -abs (A)
5119 A < 0? A : -A same as -abs (A)
5121 None of these transformations work for modes with signed
5122 zeros. If A is +/-0, the first two transformations will
5123 change the sign of the result (from +0 to -0, or vice
5124 versa). The last four will fix the sign of the result,
5125 even though the original expressions could be positive or
5126 negative, depending on the sign of A.
5128 Note that all these transformations are correct if A is
5129 NaN, since the two alternatives (A and -A) are also NaNs. */
5130 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5131 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5132 ? real_zerop (arg01)
5133 : integer_zerop (arg01))
5134 && ((TREE_CODE (arg2) == NEGATE_EXPR
5135 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5136 /* In the case that A is of the form X-Y, '-A' (arg2) may
5137 have already been folded to Y-X, check for that. */
5138 || (TREE_CODE (arg1) == MINUS_EXPR
5139 && TREE_CODE (arg2) == MINUS_EXPR
5140 && operand_equal_p (TREE_OPERAND (arg1, 0),
5141 TREE_OPERAND (arg2, 1), 0)
5142 && operand_equal_p (TREE_OPERAND (arg1, 1),
5143 TREE_OPERAND (arg2, 0), 0))))
5144 switch (comp_code)
5146 case EQ_EXPR:
5147 case UNEQ_EXPR:
5148 tem = fold_convert (arg1_type, arg1);
5149 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5150 case NE_EXPR:
5151 case LTGT_EXPR:
5152 return pedantic_non_lvalue (fold_convert (type, arg1));
5153 case UNGE_EXPR:
5154 case UNGT_EXPR:
5155 if (flag_trapping_math)
5156 break;
5157 /* Fall through. */
5158 case GE_EXPR:
5159 case GT_EXPR:
5160 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5161 arg1 = fold_convert (signed_type_for
5162 (TREE_TYPE (arg1)), arg1);
5163 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5164 return pedantic_non_lvalue (fold_convert (type, tem));
5165 case UNLE_EXPR:
5166 case UNLT_EXPR:
5167 if (flag_trapping_math)
5168 break;
5169 case LE_EXPR:
5170 case LT_EXPR:
5171 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5172 arg1 = fold_convert (signed_type_for
5173 (TREE_TYPE (arg1)), arg1);
5174 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5175 return negate_expr (fold_convert (type, tem));
5176 default:
5177 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5178 break;
5181 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5182 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5183 both transformations are correct when A is NaN: A != 0
5184 is then true, and A == 0 is false. */
5186 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5187 && integer_zerop (arg01) && integer_zerop (arg2))
5189 if (comp_code == NE_EXPR)
5190 return pedantic_non_lvalue (fold_convert (type, arg1));
5191 else if (comp_code == EQ_EXPR)
5192 return build_int_cst (type, 0);
5195 /* Try some transformations of A op B ? A : B.
5197 A == B? A : B same as B
5198 A != B? A : B same as A
5199 A >= B? A : B same as max (A, B)
5200 A > B? A : B same as max (B, A)
5201 A <= B? A : B same as min (A, B)
5202 A < B? A : B same as min (B, A)
5204 As above, these transformations don't work in the presence
5205 of signed zeros. For example, if A and B are zeros of
5206 opposite sign, the first two transformations will change
5207 the sign of the result. In the last four, the original
5208 expressions give different results for (A=+0, B=-0) and
5209 (A=-0, B=+0), but the transformed expressions do not.
5211 The first two transformations are correct if either A or B
5212 is a NaN. In the first transformation, the condition will
5213 be false, and B will indeed be chosen. In the case of the
5214 second transformation, the condition A != B will be true,
5215 and A will be chosen.
5217 The conversions to max() and min() are not correct if B is
5218 a number and A is not. The conditions in the original
5219 expressions will be false, so all four give B. The min()
5220 and max() versions would give a NaN instead. */
5221 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5222 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5223 /* Avoid these transformations if the COND_EXPR may be used
5224 as an lvalue in the C++ front-end. PR c++/19199. */
5225 && (in_gimple_form
5226 || (strcmp (lang_hooks.name, "GNU C++") != 0
5227 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5228 || ! maybe_lvalue_p (arg1)
5229 || ! maybe_lvalue_p (arg2)))
5231 tree comp_op0 = arg00;
5232 tree comp_op1 = arg01;
5233 tree comp_type = TREE_TYPE (comp_op0);
5235 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5236 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5238 comp_type = type;
5239 comp_op0 = arg1;
5240 comp_op1 = arg2;
5243 switch (comp_code)
5245 case EQ_EXPR:
5246 return pedantic_non_lvalue (fold_convert (type, arg2));
5247 case NE_EXPR:
5248 return pedantic_non_lvalue (fold_convert (type, arg1));
5249 case LE_EXPR:
5250 case LT_EXPR:
5251 case UNLE_EXPR:
5252 case UNLT_EXPR:
5253 /* In C++ a ?: expression can be an lvalue, so put the
5254 operand which will be used if they are equal first
5255 so that we can convert this back to the
5256 corresponding COND_EXPR. */
5257 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5259 comp_op0 = fold_convert (comp_type, comp_op0);
5260 comp_op1 = fold_convert (comp_type, comp_op1);
5261 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5262 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5263 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5264 return pedantic_non_lvalue (fold_convert (type, tem));
5266 break;
5267 case GE_EXPR:
5268 case GT_EXPR:
5269 case UNGE_EXPR:
5270 case UNGT_EXPR:
5271 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5273 comp_op0 = fold_convert (comp_type, comp_op0);
5274 comp_op1 = fold_convert (comp_type, comp_op1);
5275 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5276 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5277 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5278 return pedantic_non_lvalue (fold_convert (type, tem));
5280 break;
5281 case UNEQ_EXPR:
5282 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5283 return pedantic_non_lvalue (fold_convert (type, arg2));
5284 break;
5285 case LTGT_EXPR:
5286 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5287 return pedantic_non_lvalue (fold_convert (type, arg1));
5288 break;
5289 default:
5290 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5291 break;
5295 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5296 we might still be able to simplify this. For example,
5297 if C1 is one less or one more than C2, this might have started
5298 out as a MIN or MAX and been transformed by this function.
5299 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5301 if (INTEGRAL_TYPE_P (type)
5302 && TREE_CODE (arg01) == INTEGER_CST
5303 && TREE_CODE (arg2) == INTEGER_CST)
5304 switch (comp_code)
5306 case EQ_EXPR:
5307 /* We can replace A with C1 in this case. */
5308 arg1 = fold_convert (type, arg01);
5309 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5311 case LT_EXPR:
5312 /* If C1 is C2 + 1, this is min(A, C2). */
5313 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5314 OEP_ONLY_CONST)
5315 && operand_equal_p (arg01,
5316 const_binop (PLUS_EXPR, arg2,
5317 build_int_cst (type, 1), 0),
5318 OEP_ONLY_CONST))
5319 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5320 type,
5321 fold_convert (type, arg1),
5322 arg2));
5323 break;
5325 case LE_EXPR:
5326 /* If C1 is C2 - 1, this is min(A, C2). */
5327 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5328 OEP_ONLY_CONST)
5329 && operand_equal_p (arg01,
5330 const_binop (MINUS_EXPR, arg2,
5331 build_int_cst (type, 1), 0),
5332 OEP_ONLY_CONST))
5333 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5334 type,
5335 fold_convert (type, arg1),
5336 arg2));
5337 break;
5339 case GT_EXPR:
5340 /* If C1 is C2 - 1, this is max(A, C2). */
5341 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5342 OEP_ONLY_CONST)
5343 && operand_equal_p (arg01,
5344 const_binop (MINUS_EXPR, arg2,
5345 build_int_cst (type, 1), 0),
5346 OEP_ONLY_CONST))
5347 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5348 type,
5349 fold_convert (type, arg1),
5350 arg2));
5351 break;
5353 case GE_EXPR:
5354 /* If C1 is C2 + 1, this is max(A, C2). */
5355 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5356 OEP_ONLY_CONST)
5357 && operand_equal_p (arg01,
5358 const_binop (PLUS_EXPR, arg2,
5359 build_int_cst (type, 1), 0),
5360 OEP_ONLY_CONST))
5361 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5362 type,
5363 fold_convert (type, arg1),
5364 arg2));
5365 break;
5366 case NE_EXPR:
5367 break;
5368 default:
5369 gcc_unreachable ();
5372 return NULL_TREE;
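/* Editorial illustration, not part of the original source: typical
   folds performed above, all assuming signed zeros and NaNs need not
   be honored for the type at hand:

     x > 0 ? x : -x    ->  ABS_EXPR <x>
     x <= 0 ? x : -x   ->  -ABS_EXPR <x>
     x < y ? x : y     ->  MIN_EXPR <x, y>
     x >= y ? x : y    ->  MAX_EXPR <x, y>
     x < 3 ? x : 2     ->  MIN_EXPR <x, 2>   (C1 == C2 + 1)

   The last entry recovers a MIN that earlier folding had turned into
   a conditional with two constants.  */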
5377 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5378 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5379 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5380 false) >= 2)
5381 #endif
5383 /* EXP is some logical combination of boolean tests. See if we can
5384 merge it into some range test. Return the new tree if so. */
5386 static tree
5387 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5389 int or_op = (code == TRUTH_ORIF_EXPR
5390 || code == TRUTH_OR_EXPR);
5391 int in0_p, in1_p, in_p;
5392 tree low0, low1, low, high0, high1, high;
5393 bool strict_overflow_p = false;
5394 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5395 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5396 tree tem;
5397 const char * const warnmsg = G_("assuming signed overflow does not occur "
5398 "when simplifying range test");
5400 /* If this is an OR operation, invert both sides; we will invert
5401 again at the end. */
5402 if (or_op)
5403 in0_p = ! in0_p, in1_p = ! in1_p;
5405 /* If both expressions are the same, if we can merge the ranges, and we
5406 can build the range test, return it or it inverted. If one of the
5407 ranges is always true or always false, consider it to be the same
5408 expression as the other. */
5409 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5410 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5411 in1_p, low1, high1)
5412 && 0 != (tem = (build_range_check (type,
5413 lhs != 0 ? lhs
5414 : rhs != 0 ? rhs : integer_zero_node,
5415 in_p, low, high))))
5417 if (strict_overflow_p)
5418 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5419 return or_op ? invert_truthvalue (tem) : tem;
5422 /* On machines where the branch cost is expensive, if this is a
5423 short-circuited branch and the underlying object on both sides
5424 is the same, make a non-short-circuit operation. */
5425 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5426 && lhs != 0 && rhs != 0
5427 && (code == TRUTH_ANDIF_EXPR
5428 || code == TRUTH_ORIF_EXPR)
5429 && operand_equal_p (lhs, rhs, 0))
5431 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5432 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5433 which cases we can't do this. */
5434 if (simple_operand_p (lhs))
5435 return build2 (code == TRUTH_ANDIF_EXPR
5436 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5437 type, op0, op1);
5439 else if (lang_hooks.decls.global_bindings_p () == 0
5440 && ! CONTAINS_PLACEHOLDER_P (lhs))
5442 tree common = save_expr (lhs);
5444 if (0 != (lhs = build_range_check (type, common,
5445 or_op ? ! in0_p : in0_p,
5446 low0, high0))
5447 && (0 != (rhs = build_range_check (type, common,
5448 or_op ? ! in1_p : in1_p,
5449 low1, high1))))
5451 if (strict_overflow_p)
5452 fold_overflow_warning (warnmsg,
5453 WARN_STRICT_OVERFLOW_COMPARISON);
5454 return build2 (code == TRUTH_ANDIF_EXPR
5455 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5456 type, lhs, rhs);
5461 return 0;
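/* Editorial illustration, not part of the original source: the
   classic range test named in the comment before fold_truthop.  Both
   functions below are equivalent; the second is what the fold
   produces, replacing two branches with one unsigned comparison.  */
#if 0
static int
is_digit_before (int ch)
{
  return ch >= '0' && ch <= '9';
}

static int
is_digit_after (int ch)
{
  /* Subtraction done in unsigned arithmetic, as the fold does.  */
  return ((unsigned int) ch - '0') <= 9u;
}
#endif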
5464 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5465 bit value. Arrange things so the extra bits will be set to zero if and
5466 only if C is sign-extended to its full width. If MASK is nonzero,
5467 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5469 static tree
5470 unextend (tree c, int p, int unsignedp, tree mask)
5472 tree type = TREE_TYPE (c);
5473 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5474 tree temp;
5476 if (p == modesize || unsignedp)
5477 return c;
5479 /* We work by getting just the sign bit into the low-order bit, then
5480 into the high-order bit, then sign-extend. We then XOR that value
5481 with C. */
5482 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5483 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5485 /* We must use a signed type in order to get an arithmetic right shift.
5486 However, we must also avoid introducing accidental overflows, so that
5487 a subsequent call to integer_zerop will work. Hence we must
5488 do the type conversion here. At this point, the constant is either
5489 zero or one, and the conversion to a signed type can never overflow.
5490 We could get an overflow if this conversion is done anywhere else. */
5491 if (TYPE_UNSIGNED (type))
5492 temp = fold_convert (signed_type_for (type), temp);
5494 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5495 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5496 if (mask != 0)
5497 temp = const_binop (BIT_AND_EXPR, temp,
5498 fold_convert (TREE_TYPE (c), mask), 0);
5499 /* If necessary, convert the type back to match the type of C. */
5500 if (TYPE_UNSIGNED (type))
5501 temp = fold_convert (type, temp);
5503 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
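/* Editorial worked example (MASK == 0): with P == 4 and an 8-bit
   mode, the field's sign bit is bit 3, so TEMP becomes 0xF0 exactly
   when that bit is set in C.  Then

     unextend (0xFC, 4, 0, 0)  ==  0xFC ^ 0xF0  ==  0x0C
     unextend (0x0C, 4, 0, 0)  ==  0x0C ^ 0xF0  ==  0xFC

   a constant that arrived correctly sign-extended comes back with
   its extra bits clear, and one that did not comes back with them
   set, matching the "if and only if" in the comment above.  */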
5506 /* Find ways of folding logical expressions of LHS and RHS:
5507 Try to merge two comparisons to the same innermost item.
5508 Look for range tests like "ch >= '0' && ch <= '9'".
5509 Look for combinations of simple terms on machines with expensive branches
5510 and evaluate the RHS unconditionally.
5512 For example, if we have p->a == 2 && p->b == 4 and we can make an
5513 object large enough to span both A and B, we can do this with a comparison
5514 against the object ANDed with a mask.
5516 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5517 operations to do this with one comparison.
5519 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5520 function and the one above.
5522 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5523 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5525 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5526 two operands.
5528 We return the simplified tree or 0 if no optimization is possible. */
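/* Editorial illustration (hypothetical packed little-endian layout):
   for

     struct s { unsigned char a, b; } *p;
     p->a == 2 && p->b == 4

   the function below can merge the two byte compares into a single
   halfword compare, roughly

     *(unsigned short *) &p->a == (2 | 4 << 8)

   loading and testing both fields at once.  */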
5530 static tree
5531 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5533 /* If this is the "or" of two comparisons, we can do something if
5534 the comparisons are NE_EXPR. If this is the "and", we can do something
5535 if the comparisons are EQ_EXPR. I.e.,
5536 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5538 WANTED_CODE is this operation code. For single bit fields, we can
5539 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5540 comparison for one-bit fields. */
5542 enum tree_code wanted_code;
5543 enum tree_code lcode, rcode;
5544 tree ll_arg, lr_arg, rl_arg, rr_arg;
5545 tree ll_inner, lr_inner, rl_inner, rr_inner;
5546 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5547 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5548 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5549 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5550 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5551 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5552 enum machine_mode lnmode, rnmode;
5553 tree ll_mask, lr_mask, rl_mask, rr_mask;
5554 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5555 tree l_const, r_const;
5556 tree lntype, rntype, result;
5557 HOST_WIDE_INT first_bit, end_bit;
5558 int volatilep;
5559 tree orig_lhs = lhs, orig_rhs = rhs;
5560 enum tree_code orig_code = code;
5562 /* Start by getting the comparison codes. Fail if anything is volatile.
5563 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5564 it were surrounded with a NE_EXPR. */
5566 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5567 return 0;
5569 lcode = TREE_CODE (lhs);
5570 rcode = TREE_CODE (rhs);
5572 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5574 lhs = build2 (NE_EXPR, truth_type, lhs,
5575 build_int_cst (TREE_TYPE (lhs), 0));
5576 lcode = NE_EXPR;
5579 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5581 rhs = build2 (NE_EXPR, truth_type, rhs,
5582 build_int_cst (TREE_TYPE (rhs), 0));
5583 rcode = NE_EXPR;
5586 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5587 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5588 return 0;
5590 ll_arg = TREE_OPERAND (lhs, 0);
5591 lr_arg = TREE_OPERAND (lhs, 1);
5592 rl_arg = TREE_OPERAND (rhs, 0);
5593 rr_arg = TREE_OPERAND (rhs, 1);
5595 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5596 if (simple_operand_p (ll_arg)
5597 && simple_operand_p (lr_arg))
5599 tree result;
5600 if (operand_equal_p (ll_arg, rl_arg, 0)
5601 && operand_equal_p (lr_arg, rr_arg, 0))
5603 result = combine_comparisons (code, lcode, rcode,
5604 truth_type, ll_arg, lr_arg);
5605 if (result)
5606 return result;
5608 else if (operand_equal_p (ll_arg, rr_arg, 0)
5609 && operand_equal_p (lr_arg, rl_arg, 0))
5611 result = combine_comparisons (code, lcode,
5612 swap_tree_comparison (rcode),
5613 truth_type, ll_arg, lr_arg);
5614 if (result)
5615 return result;
5619 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5620 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5622 /* If the RHS can be evaluated unconditionally and its operands are
5623 simple, it wins to evaluate the RHS unconditionally on machines
5624 with expensive branches. In this case, this isn't a comparison
5625 that can be merged. Avoid doing this if the RHS is a floating-point
5626 comparison since those can trap. */
5628 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5629 false) >= 2
5630 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5631 && simple_operand_p (rl_arg)
5632 && simple_operand_p (rr_arg))
5634 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5635 if (code == TRUTH_OR_EXPR
5636 && lcode == NE_EXPR && integer_zerop (lr_arg)
5637 && rcode == NE_EXPR && integer_zerop (rr_arg)
5638 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5639 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5640 return build2 (NE_EXPR, truth_type,
5641 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5642 ll_arg, rl_arg),
5643 build_int_cst (TREE_TYPE (ll_arg), 0));
5645 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5646 if (code == TRUTH_AND_EXPR
5647 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5648 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5649 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5650 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5651 return build2 (EQ_EXPR, truth_type,
5652 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5653 ll_arg, rl_arg),
5654 build_int_cst (TREE_TYPE (ll_arg), 0));
5656 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5658 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5659 return build2 (code, truth_type, lhs, rhs);
5660 return NULL_TREE;
5664 /* See if the comparisons can be merged. Then get all the parameters for
5665 each side. */
5667 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5668 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5669 return 0;
5671 volatilep = 0;
5672 ll_inner = decode_field_reference (ll_arg,
5673 &ll_bitsize, &ll_bitpos, &ll_mode,
5674 &ll_unsignedp, &volatilep, &ll_mask,
5675 &ll_and_mask);
5676 lr_inner = decode_field_reference (lr_arg,
5677 &lr_bitsize, &lr_bitpos, &lr_mode,
5678 &lr_unsignedp, &volatilep, &lr_mask,
5679 &lr_and_mask);
5680 rl_inner = decode_field_reference (rl_arg,
5681 &rl_bitsize, &rl_bitpos, &rl_mode,
5682 &rl_unsignedp, &volatilep, &rl_mask,
5683 &rl_and_mask);
5684 rr_inner = decode_field_reference (rr_arg,
5685 &rr_bitsize, &rr_bitpos, &rr_mode,
5686 &rr_unsignedp, &volatilep, &rr_mask,
5687 &rr_and_mask);
5689 /* The inner operation on the lhs of each comparison must be the
5690 same if we are to be able to do anything. Then see if we
5691 have constants. If not, the same must be true for
5692 the rhs's. */
5693 if (volatilep || ll_inner == 0 || rl_inner == 0
5694 || ! operand_equal_p (ll_inner, rl_inner, 0))
5695 return 0;
5697 if (TREE_CODE (lr_arg) == INTEGER_CST
5698 && TREE_CODE (rr_arg) == INTEGER_CST)
5699 l_const = lr_arg, r_const = rr_arg;
5700 else if (lr_inner == 0 || rr_inner == 0
5701 || ! operand_equal_p (lr_inner, rr_inner, 0))
5702 return 0;
5703 else
5704 l_const = r_const = 0;
5706 /* If either comparison code is not correct for our logical operation,
5707 fail. However, we can convert a one-bit comparison against zero into
5708 the opposite comparison against that bit being set in the field. */
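/* For instance, with TRUTH_AND_EXPR the wanted code is EQ_EXPR, so a
   left-hand test like (a->b & 8) != 0 is recast as the equivalent
   (a->b & 8) == 8 by taking the single-bit mask as the constant.  */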
5710 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5711 if (lcode != wanted_code)
5713 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5715 /* Make the left operand unsigned, since we are only interested
5716 in the value of one bit. Otherwise we are doing the wrong
5717 thing below. */
5718 ll_unsignedp = 1;
5719 l_const = ll_mask;
5721 else
5722 return 0;
5725 /* This is analogous to the code for l_const above. */
5726 if (rcode != wanted_code)
5728 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5730 rl_unsignedp = 1;
5731 r_const = rl_mask;
5733 else
5734 return 0;
5737 /* See if we can find a mode that contains both fields being compared on
5738 the left. If we can't, fail. Otherwise, update all constants and masks
5739 to be relative to a field of that size. */
5740 first_bit = MIN (ll_bitpos, rl_bitpos);
5741 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5742 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5743 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5744 volatilep);
5745 if (lnmode == VOIDmode)
5746 return 0;
5748 lnbitsize = GET_MODE_BITSIZE (lnmode);
5749 lnbitpos = first_bit & ~ (lnbitsize - 1);
5750 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5751 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5753 if (BYTES_BIG_ENDIAN)
5755 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5756 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5759 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5760 size_int (xll_bitpos), 0);
5761 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5762 size_int (xrl_bitpos), 0);
5764 if (l_const)
5766 l_const = fold_convert (lntype, l_const);
5767 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5768 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5769 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5770 fold_build1 (BIT_NOT_EXPR,
5771 lntype, ll_mask),
5772 0)))
5774 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5776 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5779 if (r_const)
5781 r_const = fold_convert (lntype, r_const);
5782 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5783 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5784 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5785 fold_build1 (BIT_NOT_EXPR,
5786 lntype, rl_mask),
5787 0)))
5789 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5791 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5795 /* If the right sides are not constant, do the same for them. Also,
5796 disallow this optimization if a size or signedness mismatch occurs
5797 between the left and right sides. */
5798 if (l_const == 0)
5800 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5801 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5802 /* Make sure the two fields on the right
5803 correspond to the left without being swapped. */
5804 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5805 return 0;
5807 first_bit = MIN (lr_bitpos, rr_bitpos);
5808 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5809 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5810 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5811 volatilep);
5812 if (rnmode == VOIDmode)
5813 return 0;
5815 rnbitsize = GET_MODE_BITSIZE (rnmode);
5816 rnbitpos = first_bit & ~ (rnbitsize - 1);
5817 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5818 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5820 if (BYTES_BIG_ENDIAN)
5822 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5823 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5826 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5827 size_int (xlr_bitpos), 0);
5828 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5829 size_int (xrr_bitpos), 0);
5831 /* Make a mask that corresponds to both fields being compared.
5832 Do this for both items being compared. If the operands are the
5833 same size and the bits being compared are in the same position
5834 then we can do this by masking both and comparing the masked
5835 results. */
5836 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5837 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5838 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5840 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5841 ll_unsignedp || rl_unsignedp);
5842 if (! all_ones_mask_p (ll_mask, lnbitsize))
5843 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5845 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5846 lr_unsignedp || rr_unsignedp);
5847 if (! all_ones_mask_p (lr_mask, rnbitsize))
5848 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5850 return build2 (wanted_code, truth_type, lhs, rhs);
5853 /* There is still another way we can do something: If both pairs of
5854 fields being compared are adjacent, we may be able to make a wider
5855 field containing them both.
5857 Note that we still must mask the lhs/rhs expressions. Furthermore,
5858 the mask must be shifted to account for the shift done by
5859 make_bit_field_ref. */
5860 if ((ll_bitsize + ll_bitpos == rl_bitpos
5861 && lr_bitsize + lr_bitpos == rr_bitpos)
5862 || (ll_bitpos == rl_bitpos + rl_bitsize
5863 && lr_bitpos == rr_bitpos + rr_bitsize))
5865 tree type;
5867 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5868 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5869 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5870 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5872 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5873 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5874 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5875 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5877 /* Convert to the smaller type before masking out unwanted bits. */
5878 type = lntype;
5879 if (lntype != rntype)
5881 if (lnbitsize > rnbitsize)
5883 lhs = fold_convert (rntype, lhs);
5884 ll_mask = fold_convert (rntype, ll_mask);
5885 type = rntype;
5887 else if (lnbitsize < rnbitsize)
5889 rhs = fold_convert (lntype, rhs);
5890 lr_mask = fold_convert (lntype, lr_mask);
5891 type = lntype;
5895 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5896 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5898 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5899 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5901 return build2 (wanted_code, truth_type, lhs, rhs);
5904 return 0;
5907 /* Handle the case of comparisons with constants. If there is something in
5908 common between the masks, those bits of the constants must be the same.
5909 If not, the combined condition is trivially true or false. Test for
5910 this to avoid generating incorrect code below. */
5911 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5912 if (! integer_zerop (result)
5913 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5914 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5916 if (wanted_code == NE_EXPR)
5918 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5919 return constant_boolean_node (true, truth_type);
5921 else
5923 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5924 return constant_boolean_node (false, truth_type);
5928 /* Construct the expression we will return. First get the component
5929 reference we will make. Unless the mask is all ones the width of
5930 that field, perform the mask operation. Then compare with the
5931 merged constant. */
5932 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5933 ll_unsignedp || rl_unsignedp);
5935 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5936 if (! all_ones_mask_p (ll_mask, lnbitsize))
5937 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5939 return build2 (wanted_code, truth_type, result,
5940 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5943 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5944 constant. */
5946 static tree
5947 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5949 tree arg0 = op0;
5950 enum tree_code op_code;
5951 tree comp_const;
5952 tree minmax_const;
5953 int consts_equal, consts_lt;
5954 tree inner;
5956 STRIP_SIGN_NOPS (arg0);
5958 op_code = TREE_CODE (arg0);
5959 minmax_const = TREE_OPERAND (arg0, 1);
5960 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5961 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5962 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5963 inner = TREE_OPERAND (arg0, 0);
5965 /* If something does not permit us to optimize, return NULL_TREE. */
5966 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5967 || TREE_CODE (comp_const) != INTEGER_CST
5968 || TREE_OVERFLOW (comp_const)
5969 || TREE_CODE (minmax_const) != INTEGER_CST
5970 || TREE_OVERFLOW (minmax_const))
5971 return NULL_TREE;
5973 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5974 and GT_EXPR, doing the rest with recursive calls using logical
5975 simplifications. */
5976 switch (code)
5978 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5980 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5981 type, op0, op1);
5982 if (tem)
5983 return invert_truthvalue (tem);
5984 return NULL_TREE;
5987 case GE_EXPR:
5988 return
5989 fold_build2 (TRUTH_ORIF_EXPR, type,
5990 optimize_minmax_comparison
5991 (EQ_EXPR, type, arg0, comp_const),
5992 optimize_minmax_comparison
5993 (GT_EXPR, type, arg0, comp_const));
5995 case EQ_EXPR:
5996 if (op_code == MAX_EXPR && consts_equal)
5997 /* MAX (X, 0) == 0 -> X <= 0 */
5998 return fold_build2 (LE_EXPR, type, inner, comp_const);
6000 else if (op_code == MAX_EXPR && consts_lt)
6001 /* MAX (X, 0) == 5 -> X == 5 */
6002 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6004 else if (op_code == MAX_EXPR)
6005 /* MAX (X, 0) == -1 -> false */
6006 return omit_one_operand (type, integer_zero_node, inner);
6008 else if (consts_equal)
6009 /* MIN (X, 0) == 0 -> X >= 0 */
6010 return fold_build2 (GE_EXPR, type, inner, comp_const);
6012 else if (consts_lt)
6013 /* MIN (X, 0) == 5 -> false */
6014 return omit_one_operand (type, integer_zero_node, inner);
6016 else
6017 /* MIN (X, 0) == -1 -> X == -1 */
6018 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6020 case GT_EXPR:
6021 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6022 /* MAX (X, 0) > 0 -> X > 0
6023 MAX (X, 0) > 5 -> X > 5 */
6024 return fold_build2 (GT_EXPR, type, inner, comp_const);
6026 else if (op_code == MAX_EXPR)
6027 /* MAX (X, 0) > -1 -> true */
6028 return omit_one_operand (type, integer_one_node, inner);
6030 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6031 /* MIN (X, 0) > 0 -> false
6032 MIN (X, 0) > 5 -> false */
6033 return omit_one_operand (type, integer_zero_node, inner);
6035 else
6036 /* MIN (X, 0) > -1 -> X > -1 */
6037 return fold_build2 (GT_EXPR, type, inner, comp_const);
6039 default:
6040 return NULL_TREE;
6044 /* T is an integer expression that is being multiplied, divided, or taken a
6045 modulus (CODE says which and what kind of divide or modulus) by a
6046 constant C. See if we can eliminate that operation by folding it with
6047 other operations already in T. WIDE_TYPE, if non-null, is a type that
6048 should be used for the computation if wider than our type.
6050 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6051 (X * 2) + (Y * 4). We must, however, be assured that either the original
6052 expression would not overflow or that overflow is undefined for the type
6053 in the language in question.
6055 If we return a non-null expression, it is an equivalent form of the
6056 original computation, but need not be in the original type.
6058 We set *STRICT_OVERFLOW_P to true if the return value depends on
6059 signed overflow being undefined. Otherwise we do not change
6060 *STRICT_OVERFLOW_P. */
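/* A further example: with CODE == TRUNC_DIV_EXPR and C == 4, the
   tree (X * 8) + 12 can be rewritten as (X * 2) + 3, since both
   terms are divisible by 4; for a signed type this relies on
   overflow being undefined, and *STRICT_OVERFLOW_P is then set.  */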
6062 static tree
6063 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6064 bool *strict_overflow_p)
6066 /* To avoid exponential search depth, refuse to allow recursion past
6067 three levels. Beyond that (1) it's highly unlikely that we'll find
6068 something interesting and (2) we've probably processed it before
6069 when we built the inner expression. */
6071 static int depth;
6072 tree ret;
6074 if (depth > 3)
6075 return NULL;
6077 depth++;
6078 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6079 depth--;
6081 return ret;
6084 static tree
6085 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6086 bool *strict_overflow_p)
6088 tree type = TREE_TYPE (t);
6089 enum tree_code tcode = TREE_CODE (t);
6090 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6091 > GET_MODE_SIZE (TYPE_MODE (type)))
6092 ? wide_type : type);
6093 tree t1, t2;
6094 int same_p = tcode == code;
6095 tree op0 = NULL_TREE, op1 = NULL_TREE;
6096 bool sub_strict_overflow_p;
6098 /* Don't deal with constants of zero here; they confuse the code below. */
6099 if (integer_zerop (c))
6100 return NULL_TREE;
6102 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6103 op0 = TREE_OPERAND (t, 0);
6105 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6106 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6108 /* Note that we need not handle conditional operations here since fold
6109 already handles those cases. So just do arithmetic here. */
6110 switch (tcode)
6112 case INTEGER_CST:
6113 /* For a constant, we can always simplify if we are a multiply
6114 or (for divide and modulus) if it is a multiple of our constant. */
6115 if (code == MULT_EXPR
6116 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6117 return const_binop (code, fold_convert (ctype, t),
6118 fold_convert (ctype, c), 0);
6119 break;
6121 CASE_CONVERT: case NON_LVALUE_EXPR:
6122 /* If op0 is an expression ... */
6123 if ((COMPARISON_CLASS_P (op0)
6124 || UNARY_CLASS_P (op0)
6125 || BINARY_CLASS_P (op0)
6126 || VL_EXP_CLASS_P (op0)
6127 || EXPRESSION_CLASS_P (op0))
6128 /* ... and has wrapping overflow, and its type is smaller
6129 than ctype, then we cannot pass through as widening. */
6130 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6131 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6132 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6133 && (TYPE_PRECISION (ctype)
6134 > TYPE_PRECISION (TREE_TYPE (op0))))
6135 /* ... or this is a truncation (t is narrower than op0),
6136 then we cannot pass through this narrowing. */
6137 || (TYPE_PRECISION (type)
6138 < TYPE_PRECISION (TREE_TYPE (op0)))
6139 /* ... or signedness changes for division or modulus,
6140 then we cannot pass through this conversion. */
6141 || (code != MULT_EXPR
6142 && (TYPE_UNSIGNED (ctype)
6143 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6144 /* ... or has undefined overflow while the type converted
6145 to has not, we cannot do the operation in the inner type
6146 as that would introduce undefined overflow. */
6147 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6148 && !TYPE_OVERFLOW_UNDEFINED (type))))
6149 break;
6151 /* Pass the constant down and see if we can make a simplification. If
6152 we can, replace this expression with the inner simplification for
6153 possible later conversion to our or some other type. */
6154 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6155 && TREE_CODE (t2) == INTEGER_CST
6156 && !TREE_OVERFLOW (t2)
6157 && (0 != (t1 = extract_muldiv (op0, t2, code,
6158 code == MULT_EXPR
6159 ? ctype : NULL_TREE,
6160 strict_overflow_p))))
6161 return t1;
6162 break;
6164 case ABS_EXPR:
6165 /* If widening the type changes it from signed to unsigned, then we
6166 must avoid building ABS_EXPR itself as unsigned. */
6167 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6169 tree cstype = (*signed_type_for) (ctype);
6170 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6171 != 0)
6173 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6174 return fold_convert (ctype, t1);
6176 break;
6178 /* If the constant is negative, we cannot simplify this. */
6179 if (tree_int_cst_sgn (c) == -1)
6180 break;
6181 /* FALLTHROUGH */
6182 case NEGATE_EXPR:
6183 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6184 != 0)
6185 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6186 break;
6188 case MIN_EXPR: case MAX_EXPR:
6189 /* If widening the type changes the signedness, then we can't perform
6190 this optimization as that changes the result. */
6191 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6192 break;
6194 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6195 sub_strict_overflow_p = false;
6196 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6197 &sub_strict_overflow_p)) != 0
6198 && (t2 = extract_muldiv (op1, c, code, wide_type,
6199 &sub_strict_overflow_p)) != 0)
6201 if (tree_int_cst_sgn (c) < 0)
6202 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6203 if (sub_strict_overflow_p)
6204 *strict_overflow_p = true;
6205 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6206 fold_convert (ctype, t2));
6208 break;
6210 case LSHIFT_EXPR: case RSHIFT_EXPR:
6211 /* If the second operand is constant, this is a multiplication
6212 or floor division by a power of two, so we can treat it that
6213 way unless the multiplier or divisor overflows. Signed
6214 left-shift overflow is implementation-defined rather than
6215 undefined in C90, so do not convert signed left shift into
6216 multiplication. */
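/* E.g. X >> 2 is rewritten below as the floor division X / 4, and an
   unsigned X << 3 as X * 8, before recursing on that form.  */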
6217 if (TREE_CODE (op1) == INTEGER_CST
6218 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6219 /* const_binop may not detect overflow correctly,
6220 so check for it explicitly here. */
6221 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6222 && TREE_INT_CST_HIGH (op1) == 0
6223 && 0 != (t1 = fold_convert (ctype,
6224 const_binop (LSHIFT_EXPR,
6225 size_one_node,
6226 op1, 0)))
6227 && !TREE_OVERFLOW (t1))
6228 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6229 ? MULT_EXPR : FLOOR_DIV_EXPR,
6230 ctype, fold_convert (ctype, op0), t1),
6231 c, code, wide_type, strict_overflow_p);
6232 break;
6234 case PLUS_EXPR: case MINUS_EXPR:
6235 /* See if we can eliminate the operation on both sides. If we can, we
6236 can return a new PLUS or MINUS. If we can't, the only remaining
6237 cases where we can do anything are if the second operand is a
6238 constant. */
6239 sub_strict_overflow_p = false;
6240 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6241 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6242 if (t1 != 0 && t2 != 0
6243 && (code == MULT_EXPR
6244 /* If not multiplication, we can only do this if both operands
6245 are divisible by c. */
6246 || (multiple_of_p (ctype, op0, c)
6247 && multiple_of_p (ctype, op1, c))))
6249 if (sub_strict_overflow_p)
6250 *strict_overflow_p = true;
6251 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6252 fold_convert (ctype, t2));
6255 /* If this was a subtraction, negate OP1 and set it to be an addition.
6256 This simplifies the logic below. */
6257 if (tcode == MINUS_EXPR)
6258 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6260 if (TREE_CODE (op1) != INTEGER_CST)
6261 break;
6263 /* If either OP1 or C are negative, this optimization is not safe for
6264 some of the division and remainder types while for others we need
6265 to change the code. */
6266 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6268 if (code == CEIL_DIV_EXPR)
6269 code = FLOOR_DIV_EXPR;
6270 else if (code == FLOOR_DIV_EXPR)
6271 code = CEIL_DIV_EXPR;
6272 else if (code != MULT_EXPR
6273 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6274 break;
6277 /* If it's a multiply or a division/modulus operation of a multiple
6278 of our constant, do the operation and verify it doesn't overflow. */
6279 if (code == MULT_EXPR
6280 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6282 op1 = const_binop (code, fold_convert (ctype, op1),
6283 fold_convert (ctype, c), 0);
6284 /* We allow the constant to overflow with wrapping semantics. */
6285 if (op1 == 0
6286 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6287 break;
6289 else
6290 break;
6292 /* If we have an unsigned type that is not a sizetype, we cannot widen
6293 the operation since it will change the result if the original
6294 computation overflowed. */
6295 if (TYPE_UNSIGNED (ctype)
6296 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6297 && ctype != type)
6298 break;
6300 /* If we were able to eliminate our operation from the first side,
6301 apply our operation to the second side and reform the PLUS. */
6302 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6303 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6305 /* The last case is if we are a multiply. In that case, we can
6306 apply the distributive law to commute the multiply and addition
6307 if the multiplication of the constants doesn't overflow. */
6308 if (code == MULT_EXPR)
6309 return fold_build2 (tcode, ctype,
6310 fold_build2 (code, ctype,
6311 fold_convert (ctype, op0),
6312 fold_convert (ctype, c)),
6313 op1);
6315 break;
6317 case MULT_EXPR:
6318 /* We have a special case here if we are doing something like
6319 (C * 8) % 4 since we know that's zero. */
6320 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6321 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6322 /* If the multiplication can overflow we cannot optimize this.
6323 ??? Until we can properly mark individual operations as
6324 not overflowing we need to treat sizetype special here as
6325 stor-layout relies on this optimization to make
6326 DECL_FIELD_BIT_OFFSET always a constant. */
6327 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6328 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6329 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6330 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6331 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6333 *strict_overflow_p = true;
6334 return omit_one_operand (type, integer_zero_node, op0);
6337 /* ... fall through ... */
6339 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6340 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6341 /* If we can extract our operation from the LHS, do so and return a
6342 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6343 do something only if the second operand is a constant. */
6344 if (same_p
6345 && (t1 = extract_muldiv (op0, c, code, wide_type,
6346 strict_overflow_p)) != 0)
6347 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6348 fold_convert (ctype, op1));
6349 else if (tcode == MULT_EXPR && code == MULT_EXPR
6350 && (t1 = extract_muldiv (op1, c, code, wide_type,
6351 strict_overflow_p)) != 0)
6352 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6353 fold_convert (ctype, t1));
6354 else if (TREE_CODE (op1) != INTEGER_CST)
6355 return 0;
6357 /* If these are the same operation types, we can associate them
6358 assuming no overflow. */
6359 if (tcode == code
6360 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6361 fold_convert (ctype, c), 1))
6362 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6363 TREE_INT_CST_HIGH (t1),
6364 (TYPE_UNSIGNED (ctype)
6365 && tcode != MULT_EXPR) ? -1 : 1,
6366 TREE_OVERFLOW (t1)))
6367 && !TREE_OVERFLOW (t1))
6368 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6370 /* If these operations "cancel" each other, we have the main
6371 optimizations of this pass, which occur when either constant is a
6372 multiple of the other, in which case we replace this with either an
6373 operation of CODE or TCODE.
6375 If we have an unsigned type that is not a sizetype, we cannot do
6376 this since it will change the result if the original computation
6377 overflowed. */
6378 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6379 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6380 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6381 || (tcode == MULT_EXPR
6382 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6383 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6384 && code != MULT_EXPR)))
6386 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6388 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6389 *strict_overflow_p = true;
6390 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6391 fold_convert (ctype,
6392 const_binop (TRUNC_DIV_EXPR,
6393 op1, c, 0)));
6395 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6397 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6398 *strict_overflow_p = true;
6399 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6400 fold_convert (ctype,
6401 const_binop (TRUNC_DIV_EXPR,
6402 c, op1, 0)));
6405 break;
6407 default:
6408 break;
6411 return 0;
6414 /* Return a node which has the indicated constant VALUE (either 0 or
6415 1), and is of the indicated TYPE. */
6417 tree
6418 constant_boolean_node (int value, tree type)
6420 if (type == integer_type_node)
6421 return value ? integer_one_node : integer_zero_node;
6422 else if (type == boolean_type_node)
6423 return value ? boolean_true_node : boolean_false_node;
6424 else
6425 return build_int_cst (type, value);
6429 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6430 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6431 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6432 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6433 COND is the first argument to CODE; otherwise (as in the example
6434 given here), it is the second argument. TYPE is the type of the
6435 original expression. Return NULL_TREE if no simplification is
6436 possible. */
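/* For example, with CODE == PLUS_EXPR and constant A, A + (b ? 8 : 0)
   becomes b ? A + 8 : A + 0, and each arm can then fold further
   (here the else arm folds back to A).  */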
6438 static tree
6439 fold_binary_op_with_conditional_arg (enum tree_code code,
6440 tree type, tree op0, tree op1,
6441 tree cond, tree arg, int cond_first_p)
6443 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6444 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6445 tree test, true_value, false_value;
6446 tree lhs = NULL_TREE;
6447 tree rhs = NULL_TREE;
6449 /* This transformation is only worthwhile if we don't have to wrap
6450 arg in a SAVE_EXPR, and the operation can be simplified on at least
6451 one of the branches once it's pushed inside the COND_EXPR. */
6452 if (!TREE_CONSTANT (arg))
6453 return NULL_TREE;
6455 if (TREE_CODE (cond) == COND_EXPR)
6457 test = TREE_OPERAND (cond, 0);
6458 true_value = TREE_OPERAND (cond, 1);
6459 false_value = TREE_OPERAND (cond, 2);
6460 /* If this operand throws an exception, then it does not make
6461 sense to try to perform a logical or arithmetic operation
6462 involving it. */
6463 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6464 lhs = true_value;
6465 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6466 rhs = false_value;
6468 else
6470 tree testtype = TREE_TYPE (cond);
6471 test = cond;
6472 true_value = constant_boolean_node (true, testtype);
6473 false_value = constant_boolean_node (false, testtype);
6476 arg = fold_convert (arg_type, arg);
6477 if (lhs == 0)
6479 true_value = fold_convert (cond_type, true_value);
6480 if (cond_first_p)
6481 lhs = fold_build2 (code, type, true_value, arg);
6482 else
6483 lhs = fold_build2 (code, type, arg, true_value);
6485 if (rhs == 0)
6487 false_value = fold_convert (cond_type, false_value);
6488 if (cond_first_p)
6489 rhs = fold_build2 (code, type, false_value, arg);
6490 else
6491 rhs = fold_build2 (code, type, arg, false_value);
6494 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6495 return fold_convert (type, test);
6499 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6501 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6502 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6503 ADDEND is the same as X.
6505 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6506 and finite. The problematic cases are when X is zero, and its mode
6507 has signed zeros. In the case of rounding towards -infinity,
6508 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6509 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6511 bool
6512 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6514 if (!real_zerop (addend))
6515 return false;
6517 /* Don't allow the fold with -fsignaling-nans. */
6518 if (HONOR_SNANS (TYPE_MODE (type)))
6519 return false;
6521 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6522 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6523 return true;
6525 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6526 if (TREE_CODE (addend) == REAL_CST
6527 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6528 negate = !negate;
6530 /* The mode has signed zeros, and we have to honor their sign.
6531 In this situation, there is only one case we can return true for.
6532 X - 0 is the same as X unless rounding towards -infinity is
6533 supported. */
6534 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6537 /* Subroutine of fold() that checks comparisons of built-in math
6538 functions against real constants.
6540 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6541 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6542 is the type of the result and ARG0 and ARG1 are the operands of the
6543 comparison. ARG1 must be a TREE_REAL_CST.
6545 The function returns the constant folded tree if a simplification
6546 can be made, and NULL_TREE otherwise. */
6548 static tree
6549 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6550 tree type, tree arg0, tree arg1)
6552 REAL_VALUE_TYPE c;
6554 if (BUILTIN_SQRT_P (fcode))
6556 tree arg = CALL_EXPR_ARG (arg0, 0);
6557 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6559 c = TREE_REAL_CST (arg1);
6560 if (REAL_VALUE_NEGATIVE (c))
6562 /* sqrt(x) < y is always false, if y is negative. */
6563 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6564 return omit_one_operand (type, integer_zero_node, arg);
6566 /* sqrt(x) > y is always true, if y is negative and we
6567 don't care about NaNs, i.e. negative values of x. */
6568 if (code == NE_EXPR || !HONOR_NANS (mode))
6569 return omit_one_operand (type, integer_one_node, arg);
6571 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6572 return fold_build2 (GE_EXPR, type, arg,
6573 build_real (TREE_TYPE (arg), dconst0));
6575 else if (code == GT_EXPR || code == GE_EXPR)
6577 REAL_VALUE_TYPE c2;
6579 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6580 real_convert (&c2, mode, &c2);
6582 if (REAL_VALUE_ISINF (c2))
6584 /* sqrt(x) > y is x == +Inf, when y is very large. */
6585 if (HONOR_INFINITIES (mode))
6586 return fold_build2 (EQ_EXPR, type, arg,
6587 build_real (TREE_TYPE (arg), c2));
6589 /* sqrt(x) > y is always false, when y is very large
6590 and we don't care about infinities. */
6591 return omit_one_operand (type, integer_zero_node, arg);
6594 /* sqrt(x) > c is the same as x > c*c. */
6595 return fold_build2 (code, type, arg,
6596 build_real (TREE_TYPE (arg), c2));
6598 else if (code == LT_EXPR || code == LE_EXPR)
6600 REAL_VALUE_TYPE c2;
6602 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6603 real_convert (&c2, mode, &c2);
6605 if (REAL_VALUE_ISINF (c2))
6607 /* sqrt(x) < y is always true, when y is a very large
6608 value and we don't care about NaNs or Infinities. */
6609 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6610 return omit_one_operand (type, integer_one_node, arg);
6612 /* sqrt(x) < y is x != +Inf when y is very large and we
6613 don't care about NaNs. */
6614 if (! HONOR_NANS (mode))
6615 return fold_build2 (NE_EXPR, type, arg,
6616 build_real (TREE_TYPE (arg), c2));
6618 /* sqrt(x) < y is x >= 0 when y is very large and we
6619 don't care about Infinities. */
6620 if (! HONOR_INFINITIES (mode))
6621 return fold_build2 (GE_EXPR, type, arg,
6622 build_real (TREE_TYPE (arg), dconst0));
6624 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6625 if (lang_hooks.decls.global_bindings_p () != 0
6626 || CONTAINS_PLACEHOLDER_P (arg))
6627 return NULL_TREE;
6629 arg = save_expr (arg);
6630 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6631 fold_build2 (GE_EXPR, type, arg,
6632 build_real (TREE_TYPE (arg),
6633 dconst0)),
6634 fold_build2 (NE_EXPR, type, arg,
6635 build_real (TREE_TYPE (arg),
6636 c2)));
6639 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6640 if (! HONOR_NANS (mode))
6641 return fold_build2 (code, type, arg,
6642 build_real (TREE_TYPE (arg), c2));
6644 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6645 if (lang_hooks.decls.global_bindings_p () == 0
6646 && ! CONTAINS_PLACEHOLDER_P (arg))
6648 arg = save_expr (arg);
6649 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6650 fold_build2 (GE_EXPR, type, arg,
6651 build_real (TREE_TYPE (arg),
6652 dconst0)),
6653 fold_build2 (code, type, arg,
6654 build_real (TREE_TYPE (arg),
6655 c2)));
6660 return NULL_TREE;
6663 /* Subroutine of fold() that optimizes comparisons against Infinities,
6664 either +Inf or -Inf.
6666 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6667 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6668 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6670 The function returns the constant folded tree if a simplification
6671 can be made, and NULL_TREE otherwise. */
6673 static tree
6674 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6676 enum machine_mode mode;
6677 REAL_VALUE_TYPE max;
6678 tree temp;
6679 bool neg;
6681 mode = TYPE_MODE (TREE_TYPE (arg0));
6683 /* For negative infinity swap the sense of the comparison. */
6684 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6685 if (neg)
6686 code = swap_tree_comparison (code);
6688 switch (code)
6690 case GT_EXPR:
6691 /* x > +Inf is always false, if we ignore sNaNs. */
6692 if (HONOR_SNANS (mode))
6693 return NULL_TREE;
6694 return omit_one_operand (type, integer_zero_node, arg0);
6696 case LE_EXPR:
6697 /* x <= +Inf is always true, if we don't care about NaNs. */
6698 if (! HONOR_NANS (mode))
6699 return omit_one_operand (type, integer_one_node, arg0);
6701 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6702 if (lang_hooks.decls.global_bindings_p () == 0
6703 && ! CONTAINS_PLACEHOLDER_P (arg0))
6705 arg0 = save_expr (arg0);
6706 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6708 break;
6710 case EQ_EXPR:
6711 case GE_EXPR:
6712 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6713 real_maxval (&max, neg, mode);
6714 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6715 arg0, build_real (TREE_TYPE (arg0), max));
6717 case LT_EXPR:
6718 /* x < +Inf is always equal to x <= DBL_MAX. */
6719 real_maxval (&max, neg, mode);
6720 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6721 arg0, build_real (TREE_TYPE (arg0), max));
6723 case NE_EXPR:
6724 /* x != +Inf is always equal to !(x > DBL_MAX). */
6725 real_maxval (&max, neg, mode);
6726 if (! HONOR_NANS (mode))
6727 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6728 arg0, build_real (TREE_TYPE (arg0), max));
6730 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6731 arg0, build_real (TREE_TYPE (arg0), max));
6732 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6734 default:
6735 break;
6738 return NULL_TREE;
6741 /* Subroutine of fold() that optimizes comparisons of a division by
6742 a nonzero integer constant against an integer constant, i.e.
6743 X/C1 op C2.
6745 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6746 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6747 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6749 The function returns the constant folded tree if a simplification
6750 can be made, and NULL_TREE otherwise. */
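/* Example: with truncating division, X/4 == 3 holds exactly for X in
   [12, 15], so the EQ_EXPR case below builds the range check
   12 <= X && X <= 15, while X/4 > 3 becomes X > 15.  */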
6752 static tree
6753 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6755 tree prod, tmp, hi, lo;
6756 tree arg00 = TREE_OPERAND (arg0, 0);
6757 tree arg01 = TREE_OPERAND (arg0, 1);
6758 unsigned HOST_WIDE_INT lpart;
6759 HOST_WIDE_INT hpart;
6760 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6761 bool neg_overflow;
6762 int overflow;
6764 /* We have to do this the hard way to detect unsigned overflow.
6765 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6766 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6767 TREE_INT_CST_HIGH (arg01),
6768 TREE_INT_CST_LOW (arg1),
6769 TREE_INT_CST_HIGH (arg1),
6770 &lpart, &hpart, unsigned_p);
6771 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6772 -1, overflow);
6773 neg_overflow = false;
6775 if (unsigned_p)
6777 tmp = int_const_binop (MINUS_EXPR, arg01,
6778 build_int_cst (TREE_TYPE (arg01), 1), 0);
6779 lo = prod;
6781 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6782 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6783 TREE_INT_CST_HIGH (prod),
6784 TREE_INT_CST_LOW (tmp),
6785 TREE_INT_CST_HIGH (tmp),
6786 &lpart, &hpart, unsigned_p);
6787 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6788 -1, overflow | TREE_OVERFLOW (prod));
6790 else if (tree_int_cst_sgn (arg01) >= 0)
6792 tmp = int_const_binop (MINUS_EXPR, arg01,
6793 build_int_cst (TREE_TYPE (arg01), 1), 0);
6794 switch (tree_int_cst_sgn (arg1))
6796 case -1:
6797 neg_overflow = true;
6798 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6799 hi = prod;
6800 break;
6802 case 0:
6803 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6804 hi = tmp;
6805 break;
6807 case 1:
6808 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6809 lo = prod;
6810 break;
6812 default:
6813 gcc_unreachable ();
6816 else
6818 /* A negative divisor reverses the relational operators. */
6819 code = swap_tree_comparison (code);
6821 tmp = int_const_binop (PLUS_EXPR, arg01,
6822 build_int_cst (TREE_TYPE (arg01), 1), 0);
6823 switch (tree_int_cst_sgn (arg1))
6825 case -1:
6826 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6827 lo = prod;
6828 break;
6830 case 0:
6831 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6832 lo = tmp;
6833 break;
6835 case 1:
6836 neg_overflow = true;
6837 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6838 hi = prod;
6839 break;
6841 default:
6842 gcc_unreachable ();
6846 switch (code)
6848 case EQ_EXPR:
6849 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6850 return omit_one_operand (type, integer_zero_node, arg00);
6851 if (TREE_OVERFLOW (hi))
6852 return fold_build2 (GE_EXPR, type, arg00, lo);
6853 if (TREE_OVERFLOW (lo))
6854 return fold_build2 (LE_EXPR, type, arg00, hi);
6855 return build_range_check (type, arg00, 1, lo, hi);
6857 case NE_EXPR:
6858 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6859 return omit_one_operand (type, integer_one_node, arg00);
6860 if (TREE_OVERFLOW (hi))
6861 return fold_build2 (LT_EXPR, type, arg00, lo);
6862 if (TREE_OVERFLOW (lo))
6863 return fold_build2 (GT_EXPR, type, arg00, hi);
6864 return build_range_check (type, arg00, 0, lo, hi);
6866 case LT_EXPR:
6867 if (TREE_OVERFLOW (lo))
6869 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6870 return omit_one_operand (type, tmp, arg00);
6872 return fold_build2 (LT_EXPR, type, arg00, lo);
6874 case LE_EXPR:
6875 if (TREE_OVERFLOW (hi))
6877 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6878 return omit_one_operand (type, tmp, arg00);
6880 return fold_build2 (LE_EXPR, type, arg00, hi);
6882 case GT_EXPR:
6883 if (TREE_OVERFLOW (hi))
6885 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6886 return omit_one_operand (type, tmp, arg00);
6888 return fold_build2 (GT_EXPR, type, arg00, hi);
6890 case GE_EXPR:
6891 if (TREE_OVERFLOW (lo))
6893 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6894 return omit_one_operand (type, tmp, arg00);
6896 return fold_build2 (GE_EXPR, type, arg00, lo);
6898 default:
6899 break;
6902 return NULL_TREE;
6906 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6907 equality/inequality test, then return a simplified form of the test
6908 using a sign test. Otherwise return NULL. TYPE is the desired
6909 result type. */
6911 static tree
6912 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6913 tree result_type)
6915 /* If this is testing a single bit, we can optimize the test. */
6916 if ((code == NE_EXPR || code == EQ_EXPR)
6917 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6918 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6920 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6921 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6922 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6924 if (arg00 != NULL_TREE
6925 /* This is only a win if casting to a signed type is cheap,
6926 i.e. when arg00's type is not a partial mode. */
6927 && TYPE_PRECISION (TREE_TYPE (arg00))
6928 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6930 tree stype = signed_type_for (TREE_TYPE (arg00));
6931 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6932 result_type, fold_convert (stype, arg00),
6933 build_int_cst (stype, 0));
6937 return NULL_TREE;
6940 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6941 equality/inequality test, then return a simplified form of
6942 the test using shifts and logical operations. Otherwise return
6943 NULL. TYPE is the desired result type. */
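/* Example: (A & 8) != 0 becomes ((A >> 3) & 1), and (A & 8) == 0
   becomes (((A >> 3) ^ 1) & 1): the tested bit is shifted down and,
   for EQ_EXPR, inverted before the final AND.  */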
6945 tree
6946 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6947 tree result_type)
6949 /* If this is testing a single bit, we can optimize the test. */
6950 if ((code == NE_EXPR || code == EQ_EXPR)
6951 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6952 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6954 tree inner = TREE_OPERAND (arg0, 0);
6955 tree type = TREE_TYPE (arg0);
6956 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6957 enum machine_mode operand_mode = TYPE_MODE (type);
6958 int ops_unsigned;
6959 tree signed_type, unsigned_type, intermediate_type;
6960 tree tem, one;
6962 /* First, see if we can fold the single bit test into a sign-bit
6963 test. */
6964 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6965 result_type);
6966 if (tem)
6967 return tem;
6969 /* Otherwise we have (A & C) != 0 where C is a single bit,
6970 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6971 Similarly for (A & C) == 0. */
6973 /* If INNER is a right shift by a constant and it plus BITNUM does
6974 not overflow, adjust BITNUM and INNER. */
6975 if (TREE_CODE (inner) == RSHIFT_EXPR
6976 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6977 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6978 && bitnum < TYPE_PRECISION (type)
6979 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6980 bitnum - TYPE_PRECISION (type)))
6982 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6983 inner = TREE_OPERAND (inner, 0);
6986 /* If we are going to be able to omit the AND below, we must do our
6987 operations as unsigned. If we must use the AND, we have a choice.
6988 Normally unsigned is faster, but for some machines signed is. */
6989 #ifdef LOAD_EXTEND_OP
6990 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6991 && !flag_syntax_only) ? 0 : 1;
6992 #else
6993 ops_unsigned = 1;
6994 #endif
6996 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6997 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6998 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6999 inner = fold_convert (intermediate_type, inner);
7001 if (bitnum != 0)
7002 inner = build2 (RSHIFT_EXPR, intermediate_type,
7003 inner, size_int (bitnum));
7005 one = build_int_cst (intermediate_type, 1);
7007 if (code == EQ_EXPR)
7008 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
7010 /* Put the AND last so it can combine with more things. */
7011 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7013 /* Make sure to return the proper type. */
7014 inner = fold_convert (result_type, inner);
7016 return inner;
7018 return NULL_TREE;
7021 /* Check whether we are allowed to reorder operands arg0 and arg1,
7022 such that the evaluation of arg1 occurs before arg0. */
7024 static bool
7025 reorder_operands_p (const_tree arg0, const_tree arg1)
7027 if (! flag_evaluation_order)
7028 return true;
7029 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7030 return true;
7031 return ! TREE_SIDE_EFFECTS (arg0)
7032 && ! TREE_SIDE_EFFECTS (arg1);
7035 /* Test whether it is preferable to swap two operands, ARG0 and
7036 ARG1, for example because ARG0 is an integer constant and ARG1
7037 isn't. If REORDER is true, only recommend swapping if we can
7038 evaluate the operands in reverse order. */
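/* The ordering below sorts constants after everything else (so we
   canonicalize X + 1 rather than 1 + X) and, between two SSA_NAMEs,
   puts the one with the lower version number first.  */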
7040 bool
7041 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7043 STRIP_SIGN_NOPS (arg0);
7044 STRIP_SIGN_NOPS (arg1);
7046 if (TREE_CODE (arg1) == INTEGER_CST)
7047 return 0;
7048 if (TREE_CODE (arg0) == INTEGER_CST)
7049 return 1;
7051 if (TREE_CODE (arg1) == REAL_CST)
7052 return 0;
7053 if (TREE_CODE (arg0) == REAL_CST)
7054 return 1;
7056 if (TREE_CODE (arg1) == FIXED_CST)
7057 return 0;
7058 if (TREE_CODE (arg0) == FIXED_CST)
7059 return 1;
7061 if (TREE_CODE (arg1) == COMPLEX_CST)
7062 return 0;
7063 if (TREE_CODE (arg0) == COMPLEX_CST)
7064 return 1;
7066 if (TREE_CONSTANT (arg1))
7067 return 0;
7068 if (TREE_CONSTANT (arg0))
7069 return 1;
7071 if (optimize_function_for_size_p (cfun))
7072 return 0;
7074 if (reorder && flag_evaluation_order
7075 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7076 return 0;
7078 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7079 for commutative and comparison operators. Ensuring a canonical
7080 form allows the optimizers to find additional redundancies without
7081 having to explicitly check for both orderings. */
7082 if (TREE_CODE (arg0) == SSA_NAME
7083 && TREE_CODE (arg1) == SSA_NAME
7084 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7085 return 1;
7087 /* Put SSA_NAMEs last. */
7088 if (TREE_CODE (arg1) == SSA_NAME)
7089 return 0;
7090 if (TREE_CODE (arg0) == SSA_NAME)
7091 return 1;
7093 /* Put variables last. */
7094 if (DECL_P (arg1))
7095 return 0;
7096 if (DECL_P (arg0))
7097 return 1;
7099 return 0;
7102 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7103 ARG0 is extended to a wider type. */
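/* Example: for a short S, the test (int) S == 10 is narrowed back to
   S == 10, while (int) S == 100000 folds to 0 outright because
   100000 cannot fit in a short.  */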
7105 static tree
7106 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7108 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7109 tree arg1_unw;
7110 tree shorter_type, outer_type;
7111 tree min, max;
7112 bool above, below;
7114 if (arg0_unw == arg0)
7115 return NULL_TREE;
7116 shorter_type = TREE_TYPE (arg0_unw);
7118 #ifdef HAVE_canonicalize_funcptr_for_compare
7119 /* Disable this optimization if we're casting a function pointer
7120 type on targets that require function pointer canonicalization. */
7121 if (HAVE_canonicalize_funcptr_for_compare
7122 && TREE_CODE (shorter_type) == POINTER_TYPE
7123 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7124 return NULL_TREE;
7125 #endif
7127 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7128 return NULL_TREE;
7130 arg1_unw = get_unwidened (arg1, NULL_TREE);
7132 /* If possible, express the comparison in the shorter mode. */
7133 if ((code == EQ_EXPR || code == NE_EXPR
7134 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7135 && (TREE_TYPE (arg1_unw) == shorter_type
7136 || ((TYPE_PRECISION (shorter_type)
7137 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7138 && (TYPE_UNSIGNED (shorter_type)
7139 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7140 || (TREE_CODE (arg1_unw) == INTEGER_CST
7141 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7142 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7143 && int_fits_type_p (arg1_unw, shorter_type))))
7144 return fold_build2 (code, type, arg0_unw,
7145 fold_convert (shorter_type, arg1_unw));
7147 if (TREE_CODE (arg1_unw) != INTEGER_CST
7148 || TREE_CODE (shorter_type) != INTEGER_TYPE
7149 || !int_fits_type_p (arg1_unw, shorter_type))
7150 return NULL_TREE;
7152 /* If we are comparing with an integer that does not fit into the range
7153 of the shorter type, the result is known. */
7154 outer_type = TREE_TYPE (arg1_unw);
7155 min = lower_bound_in_type (outer_type, shorter_type);
7156 max = upper_bound_in_type (outer_type, shorter_type);
7158 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7159 max, arg1_unw));
7160 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7161 arg1_unw, min));
7163 switch (code)
7165 case EQ_EXPR:
7166 if (above || below)
7167 return omit_one_operand (type, integer_zero_node, arg0);
7168 break;
7170 case NE_EXPR:
7171 if (above || below)
7172 return omit_one_operand (type, integer_one_node, arg0);
7173 break;
7175 case LT_EXPR:
7176 case LE_EXPR:
7177 if (above)
7178 return omit_one_operand (type, integer_one_node, arg0);
7179 else if (below)
7180 return omit_one_operand (type, integer_zero_node, arg0);
7182 case GT_EXPR:
7183 case GE_EXPR:
7184 if (above)
7185 return omit_one_operand (type, integer_zero_node, arg0);
7186 else if (below)
7187 return omit_one_operand (type, integer_one_node, arg0);
7189 default:
7190 break;
7193 return NULL_TREE;
7196 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7197 ARG0 just the signedness is changed. */
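/* Example: (unsigned int) I == 5 becomes I == 5 for a signed int I;
   ordered comparisons such as < are rewritten only when inner and
   outer types agree in signedness, since otherwise the result could
   change.  */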
7199 static tree
7200 fold_sign_changed_comparison (enum tree_code code, tree type,
7201 tree arg0, tree arg1)
7203 tree arg0_inner;
7204 tree inner_type, outer_type;
7206 if (!CONVERT_EXPR_P (arg0))
7207 return NULL_TREE;
7209 outer_type = TREE_TYPE (arg0);
7210 arg0_inner = TREE_OPERAND (arg0, 0);
7211 inner_type = TREE_TYPE (arg0_inner);
7213 #ifdef HAVE_canonicalize_funcptr_for_compare
7214 /* Disable this optimization if we're casting a function pointer
7215 type on targets that require function pointer canonicalization. */
7216 if (HAVE_canonicalize_funcptr_for_compare
7217 && TREE_CODE (inner_type) == POINTER_TYPE
7218 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7219 return NULL_TREE;
7220 #endif
7222 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7223 return NULL_TREE;
7225 /* If the conversion is from an integral subtype to its basetype
7226 leave it alone. */
7227 if (TREE_TYPE (inner_type) == outer_type)
7228 return NULL_TREE;
7230 if (TREE_CODE (arg1) != INTEGER_CST
7231 && !(CONVERT_EXPR_P (arg1)
7232 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7233 return NULL_TREE;
7235 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7236 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7237 && code != NE_EXPR
7238 && code != EQ_EXPR)
7239 return NULL_TREE;
7241 if (TREE_CODE (arg1) == INTEGER_CST)
7242 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7243 TREE_INT_CST_HIGH (arg1), 0,
7244 TREE_OVERFLOW (arg1));
7245 else
7246 arg1 = fold_convert (inner_type, arg1);
7248 return fold_build2 (code, type, arg0_inner, arg1);
7251 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7252 the step of the array. Reconstructs s and delta in the case of s * delta
7253 being an integer constant (and thus already folded).
7254 ADDR is the address. OP1 is the multiplicative expression.
7255 If the function succeeds, the new address expression is returned. Otherwise
7256 NULL_TREE is returned. */
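/* Example: for int a[10][10] with 4-byte int, &a[1][2] p+ 8 becomes
   &a[1][4]: the byte offset 8 is two steps of the innermost
   dimension, and the new index 4 is verified not to overflow that
   dimension.  */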
7258 static tree
7259 try_move_mult_to_index (tree addr, tree op1)
7261 tree s, delta, step;
7262 tree ref = TREE_OPERAND (addr, 0), pref;
7263 tree ret, pos;
7264 tree itype;
7265 bool mdim = false;
7267 /* Strip the nops that might be added when converting op1 to sizetype. */
7268 STRIP_NOPS (op1);
7270 /* Canonicalize op1 into a possibly non-constant delta
7271 and an INTEGER_CST s. */
7272 if (TREE_CODE (op1) == MULT_EXPR)
7274 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7276 STRIP_NOPS (arg0);
7277 STRIP_NOPS (arg1);
7279 if (TREE_CODE (arg0) == INTEGER_CST)
7281 s = arg0;
7282 delta = arg1;
7284 else if (TREE_CODE (arg1) == INTEGER_CST)
7286 s = arg1;
7287 delta = arg0;
7289 else
7290 return NULL_TREE;
7292 else if (TREE_CODE (op1) == INTEGER_CST)
7294 delta = op1;
7295 s = NULL_TREE;
7297 else
7299 /* Treat op1 as delta * 1. */
7300 delta = op1;
7301 s = integer_one_node;
7304 for (;; ref = TREE_OPERAND (ref, 0))
7306 if (TREE_CODE (ref) == ARRAY_REF)
7308 /* Remember if this was a multi-dimensional array. */
7309 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7310 mdim = true;
7312 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7313 if (! itype)
7314 continue;
7316 step = array_ref_element_size (ref);
7317 if (TREE_CODE (step) != INTEGER_CST)
7318 continue;
7320 if (s)
7322 if (! tree_int_cst_equal (step, s))
7323 continue;
7325 else
7327 /* Check whether delta is a multiple of step. */
7328 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7329 if (! tmp)
7330 continue;
7331 delta = tmp;
7334 /* Only fold here if we can verify we do not overflow one
7335 dimension of a multi-dimensional array. */
7336 if (mdim)
7338 tree tmp;
7340 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7341 || !INTEGRAL_TYPE_P (itype)
7342 || !TYPE_MAX_VALUE (itype)
7343 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7344 continue;
7346 tmp = fold_binary (PLUS_EXPR, itype,
7347 fold_convert (itype,
7348 TREE_OPERAND (ref, 1)),
7349 fold_convert (itype, delta));
7350 if (!tmp
7351 || TREE_CODE (tmp) != INTEGER_CST
7352 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7353 continue;
7356 break;
7358 else
7359 mdim = false;
7361 if (!handled_component_p (ref))
7362 return NULL_TREE;
7365 /* We found a suitable array reference. So copy everything up to it,
7366 and replace the index. */
7368 pref = TREE_OPERAND (addr, 0);
7369 ret = copy_node (pref);
7370 pos = ret;
7372 while (pref != ref)
7374 pref = TREE_OPERAND (pref, 0);
7375 TREE_OPERAND (pos, 0) = copy_node (pref);
7376 pos = TREE_OPERAND (pos, 0);
7379 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7380 fold_convert (itype,
7381 TREE_OPERAND (pos, 1)),
7382 fold_convert (itype, delta));
7384 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7388 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7389 means A >= Y && A != MAX, but in this case we know that
7390 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
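/* Editorial illustration, not part of the original file: for signed
   integers a, x and y, the conjunction

     a < x && a + 1 > y

   becomes "a < x && a >= y"; the bound a < x rules out a == MAX, so
   a + 1 cannot wrap and the sharp inequality can be relaxed. */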
7392 static tree
7393 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7395 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7397 if (TREE_CODE (bound) == LT_EXPR)
7398 a = TREE_OPERAND (bound, 0);
7399 else if (TREE_CODE (bound) == GT_EXPR)
7400 a = TREE_OPERAND (bound, 1);
7401 else
7402 return NULL_TREE;
7404 typea = TREE_TYPE (a);
7405 if (!INTEGRAL_TYPE_P (typea)
7406 && !POINTER_TYPE_P (typea))
7407 return NULL_TREE;
7409 if (TREE_CODE (ineq) == LT_EXPR)
7411 a1 = TREE_OPERAND (ineq, 1);
7412 y = TREE_OPERAND (ineq, 0);
7414 else if (TREE_CODE (ineq) == GT_EXPR)
7416 a1 = TREE_OPERAND (ineq, 0);
7417 y = TREE_OPERAND (ineq, 1);
7419 else
7420 return NULL_TREE;
7422 if (TREE_TYPE (a1) != typea)
7423 return NULL_TREE;
7425 if (POINTER_TYPE_P (typea))
7427 /* Convert the pointers to integers before taking the difference. */
7428 tree ta = fold_convert (ssizetype, a);
7429 tree ta1 = fold_convert (ssizetype, a1);
7430 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7432 else
7433 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7435 if (!diff || !integer_onep (diff))
7436 return NULL_TREE;
7438 return fold_build2 (GE_EXPR, type, a, y);
7441 /* Fold a sum or difference of at least one multiplication.
7442 Returns the folded tree or NULL if no simplification could be made. */
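/* Editorial illustration, not part of the original file: folds
   performed by the function below include

     x * 3 + y * 3   ->  (x + y) * 3
     x * 5 - x       ->  x * 4            (the (A * C) +- A case)
     i * 12 + j * 4  ->  (i * 3 + j) * 4

   the last one via the common power-of-two factor search, which
   helps with multi-dimensional array address arithmetic. */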
7444 static tree
7445 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7447 tree arg00, arg01, arg10, arg11;
7448 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7450 /* (A * C) +- (B * C) -> (A+-B) * C.
7451 (A * C) +- A -> A * (C+-1).
7452 We are most concerned about the case where C is a constant,
7453 but other combinations show up during loop reduction. Since
7454 it is not difficult, try all four possibilities. */
7456 if (TREE_CODE (arg0) == MULT_EXPR)
7458 arg00 = TREE_OPERAND (arg0, 0);
7459 arg01 = TREE_OPERAND (arg0, 1);
7461 else if (TREE_CODE (arg0) == INTEGER_CST)
7463 arg00 = build_one_cst (type);
7464 arg01 = arg0;
7466 else
7468 /* We cannot generate constant 1 for fract. */
7469 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7470 return NULL_TREE;
7471 arg00 = arg0;
7472 arg01 = build_one_cst (type);
7474 if (TREE_CODE (arg1) == MULT_EXPR)
7476 arg10 = TREE_OPERAND (arg1, 0);
7477 arg11 = TREE_OPERAND (arg1, 1);
7479 else if (TREE_CODE (arg1) == INTEGER_CST)
7481 arg10 = build_one_cst (type);
7482 /* Since we canonicalize A - 2 to A + -2, get rid of that sign for
7483 the purpose of this transformation. */
7484 if (TREE_INT_CST_HIGH (arg1) == -1
7485 && negate_expr_p (arg1)
7486 && code == PLUS_EXPR)
7488 arg11 = negate_expr (arg1);
7489 code = MINUS_EXPR;
7491 else
7492 arg11 = arg1;
7494 else
7496 /* We cannot generate constant 1 for fract. */
7497 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7498 return NULL_TREE;
7499 arg10 = arg1;
7500 arg11 = build_one_cst (type);
7502 same = NULL_TREE;
7504 if (operand_equal_p (arg01, arg11, 0))
7505 same = arg01, alt0 = arg00, alt1 = arg10;
7506 else if (operand_equal_p (arg00, arg10, 0))
7507 same = arg00, alt0 = arg01, alt1 = arg11;
7508 else if (operand_equal_p (arg00, arg11, 0))
7509 same = arg00, alt0 = arg01, alt1 = arg10;
7510 else if (operand_equal_p (arg01, arg10, 0))
7511 same = arg01, alt0 = arg00, alt1 = arg11;
7513 /* No identical multiplicands; see if we can find a common
7514 power-of-two factor in non-power-of-two multiplies. This
7515 can help in multi-dimensional array access. */
7516 else if (host_integerp (arg01, 0)
7517 && host_integerp (arg11, 0))
7519 HOST_WIDE_INT int01, int11, tmp;
7520 bool swap = false;
7521 tree maybe_same;
7522 int01 = TREE_INT_CST_LOW (arg01);
7523 int11 = TREE_INT_CST_LOW (arg11);
7525 /* Move min of absolute values to int11. */
7526 if ((int01 >= 0 ? int01 : -int01)
7527 < (int11 >= 0 ? int11 : -int11))
7529 tmp = int01, int01 = int11, int11 = tmp;
7530 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7531 maybe_same = arg01;
7532 swap = true;
7534 else
7535 maybe_same = arg11;
7537 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7538 /* The remainder should not be a constant, otherwise we
7539 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7540 increases the number of multiplications needed. */
7541 && TREE_CODE (arg10) != INTEGER_CST)
7543 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7544 build_int_cst (TREE_TYPE (arg00),
7545 int01 / int11));
7546 alt1 = arg10;
7547 same = maybe_same;
7548 if (swap)
7549 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7553 if (same)
7554 return fold_build2 (MULT_EXPR, type,
7555 fold_build2 (code, type,
7556 fold_convert (type, alt0),
7557 fold_convert (type, alt1)),
7558 fold_convert (type, same));
7560 return NULL_TREE;
7563 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7564 specified by EXPR into the buffer PTR of length LEN bytes.
7565 Return the number of bytes placed in the buffer, or zero
7566 upon failure. */
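/* Editorial illustration, not part of the original file, assuming a
   word of at least four bytes: encoding the 32-bit constant
   0x01020304 extracts the byte values 0x04, 0x03, 0x02 and 0x01 at
   bit positions 0, 8, 16 and 24. With !BYTES_BIG_ENDIAN they are
   stored in that order (04 03 02 01); with BYTES_BIG_ENDIAN the
   offsets are mirrored and the buffer reads 01 02 03 04. */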
7568 static int
7569 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7571 tree type = TREE_TYPE (expr);
7572 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7573 int byte, offset, word, words;
7574 unsigned char value;
7576 if (total_bytes > len)
7577 return 0;
7578 words = total_bytes / UNITS_PER_WORD;
7580 for (byte = 0; byte < total_bytes; byte++)
7582 int bitpos = byte * BITS_PER_UNIT;
7583 if (bitpos < HOST_BITS_PER_WIDE_INT)
7584 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7585 else
7586 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7587 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7589 if (total_bytes > UNITS_PER_WORD)
7591 word = byte / UNITS_PER_WORD;
7592 if (WORDS_BIG_ENDIAN)
7593 word = (words - 1) - word;
7594 offset = word * UNITS_PER_WORD;
7595 if (BYTES_BIG_ENDIAN)
7596 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7597 else
7598 offset += byte % UNITS_PER_WORD;
7600 else
7601 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7602 ptr[offset] = value;
7604 return total_bytes;
7608 /* Subroutine of native_encode_expr. Encode the REAL_CST
7609 specified by EXPR into the buffer PTR of length LEN bytes.
7610 Return the number of bytes placed in the buffer, or zero
7611 upon failure. */
7613 static int
7614 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7616 tree type = TREE_TYPE (expr);
7617 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7618 int byte, offset, word, words, bitpos;
7619 unsigned char value;
7621 /* real_to_target puts 32 bits in each long, no matter the size
7622 of the host's long. We handle floating point representations
7623 with up to 192 bits. */
7624 long tmp[6];
7626 if (total_bytes > len)
7627 return 0;
7628 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7630 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7632 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7633 bitpos += BITS_PER_UNIT)
7635 byte = (bitpos / BITS_PER_UNIT) & 3;
7636 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7638 if (UNITS_PER_WORD < 4)
7640 word = byte / UNITS_PER_WORD;
7641 if (WORDS_BIG_ENDIAN)
7642 word = (words - 1) - word;
7643 offset = word * UNITS_PER_WORD;
7644 if (BYTES_BIG_ENDIAN)
7645 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7646 else
7647 offset += byte % UNITS_PER_WORD;
7649 else
7650 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7651 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7653 return total_bytes;
7656 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7657 specified by EXPR into the buffer PTR of length LEN bytes.
7658 Return the number of bytes placed in the buffer, or zero
7659 upon failure. */
7661 static int
7662 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7664 int rsize, isize;
7665 tree part;
7667 part = TREE_REALPART (expr);
7668 rsize = native_encode_expr (part, ptr, len);
7669 if (rsize == 0)
7670 return 0;
7671 part = TREE_IMAGPART (expr);
7672 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7673 if (isize != rsize)
7674 return 0;
7675 return rsize + isize;
7679 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7680 specified by EXPR into the buffer PTR of length LEN bytes.
7681 Return the number of bytes placed in the buffer, or zero
7682 upon failure. */
7684 static int
7685 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7687 int i, size, offset, count;
7688 tree itype, elem, elements;
7690 offset = 0;
7691 elements = TREE_VECTOR_CST_ELTS (expr);
7692 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7693 itype = TREE_TYPE (TREE_TYPE (expr));
7694 size = GET_MODE_SIZE (TYPE_MODE (itype));
7695 for (i = 0; i < count; i++)
7697 if (elements)
7699 elem = TREE_VALUE (elements);
7700 elements = TREE_CHAIN (elements);
7702 else
7703 elem = NULL_TREE;
7705 if (elem)
7707 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7708 return 0;
7710 else
7712 if (offset + size > len)
7713 return 0;
7714 memset (ptr+offset, 0, size);
7716 offset += size;
7718 return offset;
7722 /* Subroutine of native_encode_expr. Encode the STRING_CST
7723 specified by EXPR into the buffer PTR of length LEN bytes.
7724 Return the number of bytes placed in the buffer, or zero
7725 upon failure. */
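/* Editorial illustration, not part of the original file: for the C
   initializer

     char buf[8] = "hi";

   the STRING_CST holds three bytes ('h', 'i', '\0') while the array
   type is eight bytes wide, so the first three bytes are copied and
   the remaining five are zero-filled, matching C's static
   initialization semantics. */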
7727 static int
7728 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7730 tree type = TREE_TYPE (expr);
7731 HOST_WIDE_INT total_bytes;
7733 if (TREE_CODE (type) != ARRAY_TYPE
7734 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7735 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7736 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7737 return 0;
7738 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7739 if (total_bytes > len)
7740 return 0;
7741 if (TREE_STRING_LENGTH (expr) < total_bytes)
7743 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7744 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7745 total_bytes - TREE_STRING_LENGTH (expr));
7747 else
7748 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7749 return total_bytes;
7753 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7754 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7755 buffer PTR of length LEN bytes. Return the number of bytes
7756 placed in the buffer, or zero upon failure. */
7758 int
7759 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7761 switch (TREE_CODE (expr))
7763 case INTEGER_CST:
7764 return native_encode_int (expr, ptr, len);
7766 case REAL_CST:
7767 return native_encode_real (expr, ptr, len);
7769 case COMPLEX_CST:
7770 return native_encode_complex (expr, ptr, len);
7772 case VECTOR_CST:
7773 return native_encode_vector (expr, ptr, len);
7775 case STRING_CST:
7776 return native_encode_string (expr, ptr, len);
7778 default:
7779 return 0;
7784 /* Subroutine of native_interpret_expr. Interpret the contents of
7785 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7786 If the buffer cannot be interpreted, return NULL_TREE. */
7788 static tree
7789 native_interpret_int (tree type, const unsigned char *ptr, int len)
7791 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7792 int byte, offset, word, words;
7793 unsigned char value;
7794 unsigned int HOST_WIDE_INT lo = 0;
7795 HOST_WIDE_INT hi = 0;
7797 if (total_bytes > len)
7798 return NULL_TREE;
7799 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7800 return NULL_TREE;
7801 words = total_bytes / UNITS_PER_WORD;
7803 for (byte = 0; byte < total_bytes; byte++)
7805 int bitpos = byte * BITS_PER_UNIT;
7806 if (total_bytes > UNITS_PER_WORD)
7808 word = byte / UNITS_PER_WORD;
7809 if (WORDS_BIG_ENDIAN)
7810 word = (words - 1) - word;
7811 offset = word * UNITS_PER_WORD;
7812 if (BYTES_BIG_ENDIAN)
7813 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7814 else
7815 offset += byte % UNITS_PER_WORD;
7817 else
7818 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7819 value = ptr[offset];
7821 if (bitpos < HOST_BITS_PER_WIDE_INT)
7822 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7823 else
7824 hi |= (unsigned HOST_WIDE_INT) value
7825 << (bitpos - HOST_BITS_PER_WIDE_INT);
7828 return build_int_cst_wide_type (type, lo, hi);
7832 /* Subroutine of native_interpret_expr. Interpret the contents of
7833 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7834 If the buffer cannot be interpreted, return NULL_TREE. */
7836 static tree
7837 native_interpret_real (tree type, const unsigned char *ptr, int len)
7839 enum machine_mode mode = TYPE_MODE (type);
7840 int total_bytes = GET_MODE_SIZE (mode);
7841 int byte, offset, word, words, bitpos;
7842 unsigned char value;
7843 /* real_from_target expects 32 bits in each long, no matter the
7844 size of the host's long. We handle floating point
7845 representations with up to 192 bits. */
7846 REAL_VALUE_TYPE r;
7847 long tmp[6];
7849 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7850 if (total_bytes > len || total_bytes > 24)
7851 return NULL_TREE;
7852 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7854 memset (tmp, 0, sizeof (tmp));
7855 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7856 bitpos += BITS_PER_UNIT)
7858 byte = (bitpos / BITS_PER_UNIT) & 3;
7859 if (UNITS_PER_WORD < 4)
7861 word = byte / UNITS_PER_WORD;
7862 if (WORDS_BIG_ENDIAN)
7863 word = (words - 1) - word;
7864 offset = word * UNITS_PER_WORD;
7865 if (BYTES_BIG_ENDIAN)
7866 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7867 else
7868 offset += byte % UNITS_PER_WORD;
7870 else
7871 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7872 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7874 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7877 real_from_target (&r, tmp, mode);
7878 return build_real (type, r);
7882 /* Subroutine of native_interpret_expr. Interpret the contents of
7883 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7884 If the buffer cannot be interpreted, return NULL_TREE. */
7886 static tree
7887 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7889 tree etype, rpart, ipart;
7890 int size;
7892 etype = TREE_TYPE (type);
7893 size = GET_MODE_SIZE (TYPE_MODE (etype));
7894 if (size * 2 > len)
7895 return NULL_TREE;
7896 rpart = native_interpret_expr (etype, ptr, size);
7897 if (!rpart)
7898 return NULL_TREE;
7899 ipart = native_interpret_expr (etype, ptr+size, size);
7900 if (!ipart)
7901 return NULL_TREE;
7902 return build_complex (type, rpart, ipart);
7906 /* Subroutine of native_interpret_expr. Interpret the contents of
7907 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7908 If the buffer cannot be interpreted, return NULL_TREE. */
7910 static tree
7911 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7913 tree etype, elem, elements;
7914 int i, size, count;
7916 etype = TREE_TYPE (type);
7917 size = GET_MODE_SIZE (TYPE_MODE (etype));
7918 count = TYPE_VECTOR_SUBPARTS (type);
7919 if (size * count > len)
7920 return NULL_TREE;
7922 elements = NULL_TREE;
7923 for (i = count - 1; i >= 0; i--)
7925 elem = native_interpret_expr (etype, ptr+(i*size), size);
7926 if (!elem)
7927 return NULL_TREE;
7928 elements = tree_cons (NULL_TREE, elem, elements);
7930 return build_vector (type, elements);
7934 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7935 the buffer PTR of length LEN as a constant of type TYPE. For
7936 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7937 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7938 return NULL_TREE. */
7940 tree
7941 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7943 switch (TREE_CODE (type))
7945 case INTEGER_TYPE:
7946 case ENUMERAL_TYPE:
7947 case BOOLEAN_TYPE:
7948 return native_interpret_int (type, ptr, len);
7950 case REAL_TYPE:
7951 return native_interpret_real (type, ptr, len);
7953 case COMPLEX_TYPE:
7954 return native_interpret_complex (type, ptr, len);
7956 case VECTOR_TYPE:
7957 return native_interpret_vector (type, ptr, len);
7959 default:
7960 return NULL_TREE;
7965 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7966 TYPE at compile-time. If we're unable to perform the conversion
7967 return NULL_TREE. */
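/* Editorial sketch, not part of the original file: the tree-level
   round-trip below mirrors what this C fragment does for a 32-bit
   IEEE float on a host where CHAR_BIT == 8:

     float f = 1.0f;
     unsigned int bits;
     memcpy (&bits, &f, sizeof bits);

   which yields bits == 0x3f800000. native_encode_expr serializes
   the constant into the buffer and native_interpret_expr rebuilds a
   constant of the requested type from the same bytes. */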
7969 static tree
7970 fold_view_convert_expr (tree type, tree expr)
7972 /* We support up to 512-bit values (for V8DFmode). */
7973 unsigned char buffer[64];
7974 int len;
7976 /* Check that the host and target are sane. */
7977 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7978 return NULL_TREE;
7980 len = native_encode_expr (expr, buffer, sizeof (buffer));
7981 if (len == 0)
7982 return NULL_TREE;
7984 return native_interpret_expr (type, buffer, len);
7987 /* Build an expression for the address of T. Folds away INDIRECT_REF
7988 to avoid confusing the gimplify process. */
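/* Editorial illustration, not part of the original file: taking the
   address of an INDIRECT_REF cancels the dereference, so for a
   pointer p the tree for "&*p" folds to plain "p", wrapped in a
   NOP_EXPR when the requested pointer type differs. */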
7990 tree
7991 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7993 /* The size of the object is not relevant when talking about its address. */
7994 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7995 t = TREE_OPERAND (t, 0);
7997 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7998 if (TREE_CODE (t) == INDIRECT_REF
7999 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
8001 t = TREE_OPERAND (t, 0);
8003 if (TREE_TYPE (t) != ptrtype)
8004 t = build1 (NOP_EXPR, ptrtype, t);
8006 else
8007 t = build1 (ADDR_EXPR, ptrtype, t);
8009 return t;
8012 /* Build an expression for the address of T. */
8014 tree
8015 build_fold_addr_expr (tree t)
8017 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8019 return build_fold_addr_expr_with_type (t, ptrtype);
8022 /* Fold a unary expression of code CODE and type TYPE with operand
8023 OP0. Return the folded expression if folding is successful.
8024 Otherwise, return NULL_TREE. */
8026 tree
8027 fold_unary (enum tree_code code, tree type, tree op0)
8029 tree tem;
8030 tree arg0;
8031 enum tree_code_class kind = TREE_CODE_CLASS (code);
8033 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8034 && TREE_CODE_LENGTH (code) == 1);
8036 arg0 = op0;
8037 if (arg0)
8039 if (CONVERT_EXPR_CODE_P (code)
8040 || code == FLOAT_EXPR || code == ABS_EXPR)
8042 /* Don't use STRIP_NOPS, because signedness of argument type
8043 matters. */
8044 STRIP_SIGN_NOPS (arg0);
8046 else
8048 /* Strip any conversions that don't change the mode. This
8049 is safe for every expression, except for a comparison
8050 expression because its signedness is derived from its
8051 operands.
8053 Note that this is done as an internal manipulation within
8054 the constant folder, in order to find the simplest
8055 representation of the arguments so that their form can be
8056 studied. In any case, the appropriate type conversions
8057 should be put back in the tree that comes out of the
8058 constant folder. */
8059 STRIP_NOPS (arg0);
8063 if (TREE_CODE_CLASS (code) == tcc_unary)
8065 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8066 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8067 fold_build1 (code, type,
8068 fold_convert (TREE_TYPE (op0),
8069 TREE_OPERAND (arg0, 1))));
8070 else if (TREE_CODE (arg0) == COND_EXPR)
8072 tree arg01 = TREE_OPERAND (arg0, 1);
8073 tree arg02 = TREE_OPERAND (arg0, 2);
8074 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8075 arg01 = fold_build1 (code, type,
8076 fold_convert (TREE_TYPE (op0), arg01));
8077 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8078 arg02 = fold_build1 (code, type,
8079 fold_convert (TREE_TYPE (op0), arg02));
8080 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8081 arg01, arg02);
8083 /* If this was a conversion, and all we did was move it
8084 inside the COND_EXPR, bring it back out. But leave it if
8085 it is a conversion from integer to integer and the
8086 result precision is no wider than a word since such a
8087 conversion is cheap and may be optimized away by combine,
8088 while it couldn't if it were outside the COND_EXPR. Then return
8089 so we don't get into an infinite recursion loop taking the
8090 conversion out and then back in. */
8092 if ((CONVERT_EXPR_CODE_P (code)
8093 || code == NON_LVALUE_EXPR)
8094 && TREE_CODE (tem) == COND_EXPR
8095 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8096 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8097 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8098 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8099 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8100 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8101 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8102 && (INTEGRAL_TYPE_P
8103 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8104 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8105 || flag_syntax_only))
8106 tem = build1 (code, type,
8107 build3 (COND_EXPR,
8108 TREE_TYPE (TREE_OPERAND
8109 (TREE_OPERAND (tem, 1), 0)),
8110 TREE_OPERAND (tem, 0),
8111 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8112 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8113 return tem;
8115 else if (COMPARISON_CLASS_P (arg0))
8117 if (TREE_CODE (type) == BOOLEAN_TYPE)
8119 arg0 = copy_node (arg0);
8120 TREE_TYPE (arg0) = type;
8121 return arg0;
8123 else if (TREE_CODE (type) != INTEGER_TYPE)
8124 return fold_build3 (COND_EXPR, type, arg0,
8125 fold_build1 (code, type,
8126 integer_one_node),
8127 fold_build1 (code, type,
8128 integer_zero_node));
8132 switch (code)
8134 case PAREN_EXPR:
8135 /* Re-association barriers around constants and other re-association
8136 barriers can be removed. */
8137 if (CONSTANT_CLASS_P (op0)
8138 || TREE_CODE (op0) == PAREN_EXPR)
8139 return fold_convert (type, op0);
8140 return NULL_TREE;
8142 CASE_CONVERT:
8143 case FLOAT_EXPR:
8144 case FIX_TRUNC_EXPR:
8145 if (TREE_TYPE (op0) == type)
8146 return op0;
8148 /* If we have (type) (a CMP b) and type is an integral type, return
8149 new expression involving the new type. */
8150 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8151 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8152 TREE_OPERAND (op0, 1));
8154 /* Handle cases of two conversions in a row. */
8155 if (CONVERT_EXPR_P (op0))
8157 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8158 tree inter_type = TREE_TYPE (op0);
8159 int inside_int = INTEGRAL_TYPE_P (inside_type);
8160 int inside_ptr = POINTER_TYPE_P (inside_type);
8161 int inside_float = FLOAT_TYPE_P (inside_type);
8162 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8163 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8164 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8165 int inter_int = INTEGRAL_TYPE_P (inter_type);
8166 int inter_ptr = POINTER_TYPE_P (inter_type);
8167 int inter_float = FLOAT_TYPE_P (inter_type);
8168 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8169 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8170 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8171 int final_int = INTEGRAL_TYPE_P (type);
8172 int final_ptr = POINTER_TYPE_P (type);
8173 int final_float = FLOAT_TYPE_P (type);
8174 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8175 unsigned int final_prec = TYPE_PRECISION (type);
8176 int final_unsignedp = TYPE_UNSIGNED (type);
8178 /* In addition to the cases of two conversions in a row
8179 handled below, if we are converting something to its own
8180 type via an object of identical or wider precision, neither
8181 conversion is needed. */
8182 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8183 && (((inter_int || inter_ptr) && final_int)
8184 || (inter_float && final_float))
8185 && inter_prec >= final_prec)
8186 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8188 /* Likewise, if the intermediate and initial types are either both
8189 float or both integer, we don't need the middle conversion if the
8190 former is wider than the latter and doesn't change the signedness
8191 (for integers). Avoid this if the final type is a pointer since
8192 then we sometimes need the middle conversion. Likewise if the
8193 final type has a precision not equal to the size of its mode. */
8194 if (((inter_int && inside_int)
8195 || (inter_float && inside_float)
8196 || (inter_vec && inside_vec))
8197 && inter_prec >= inside_prec
8198 && (inter_float || inter_vec
8199 || inter_unsignedp == inside_unsignedp)
8200 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8201 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8202 && ! final_ptr
8203 && (! final_vec || inter_prec == inside_prec))
8204 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8206 /* If we have a sign-extension of a zero-extended value, we can
8207 replace that by a single zero-extension. */
8208 if (inside_int && inter_int && final_int
8209 && inside_prec < inter_prec && inter_prec < final_prec
8210 && inside_unsignedp && !inter_unsignedp)
8211 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
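/* Editorial illustration, not part of the original file, assuming
   8-bit char, 32-bit int and 64-bit long long: for "unsigned char c",
   (long long)(int)c first zero-extends to 32 bits and then
   sign-extends to 64; the intermediate value is never negative, so
   the pair is equivalent to the single zero-extension
   (long long)c. */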
8213 /* Two conversions in a row are not needed unless:
8214 - some conversion is floating-point (overstrict for now), or
8215 - some conversion is a vector (overstrict for now), or
8216 - the intermediate type is narrower than both initial and
8217 final, or
8218 - the intermediate type and innermost type differ in signedness,
8219 and the outermost type is wider than the intermediate, or
8220 - the initial type is a pointer type and the precisions of the
8221 intermediate and final types differ, or
8222 - the final type is a pointer type and the precisions of the
8223 initial and intermediate types differ. */
8224 if (! inside_float && ! inter_float && ! final_float
8225 && ! inside_vec && ! inter_vec && ! final_vec
8226 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8227 && ! (inside_int && inter_int
8228 && inter_unsignedp != inside_unsignedp
8229 && inter_prec < final_prec)
8230 && ((inter_unsignedp && inter_prec > inside_prec)
8231 == (final_unsignedp && final_prec > inter_prec))
8232 && ! (inside_ptr && inter_prec != final_prec)
8233 && ! (final_ptr && inside_prec != inter_prec)
8234 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8235 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8236 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8239 /* Handle (T *)&A.B.C for A being of type T and B and C
8240 living at offset zero. This occurs frequently in
8241 C++ upcasting and then accessing the base. */
8242 if (TREE_CODE (op0) == ADDR_EXPR
8243 && POINTER_TYPE_P (type)
8244 && handled_component_p (TREE_OPERAND (op0, 0)))
8246 HOST_WIDE_INT bitsize, bitpos;
8247 tree offset;
8248 enum machine_mode mode;
8249 int unsignedp, volatilep;
8250 tree base = TREE_OPERAND (op0, 0);
8251 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8252 &mode, &unsignedp, &volatilep, false);
8253 /* If the reference was to a (constant) zero offset, we can use
8254 the address of the base if it has the same base type
8255 as the result type. */
8256 if (! offset && bitpos == 0
8257 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8258 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8259 return fold_convert (type, build_fold_addr_expr (base));
8262 if (TREE_CODE (op0) == MODIFY_EXPR
8263 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8264 /* Detect assigning a bitfield. */
8265 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8266 && DECL_BIT_FIELD
8267 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8269 /* Don't leave an assignment inside a conversion
8270 unless assigning a bitfield. */
8271 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8272 /* First do the assignment, then return converted constant. */
8273 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8274 TREE_NO_WARNING (tem) = 1;
8275 TREE_USED (tem) = 1;
8276 return tem;
8279 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8280 constants (if x has signed type, the sign bit cannot be set
8281 in c). This folds extension into the BIT_AND_EXPR.
8282 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8283 very likely don't have maximal range for their precision and this
8284 transformation effectively doesn't preserve non-maximal ranges. */
8285 if (TREE_CODE (type) == INTEGER_TYPE
8286 && TREE_CODE (op0) == BIT_AND_EXPR
8287 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
8288 /* Not if the conversion is to the sub-type. */
8289 && TREE_TYPE (type) != TREE_TYPE (op0))
8291 tree and = op0;
8292 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8293 int change = 0;
8295 if (TYPE_UNSIGNED (TREE_TYPE (and))
8296 || (TYPE_PRECISION (type)
8297 <= TYPE_PRECISION (TREE_TYPE (and))))
8298 change = 1;
8299 else if (TYPE_PRECISION (TREE_TYPE (and1))
8300 <= HOST_BITS_PER_WIDE_INT
8301 && host_integerp (and1, 1))
8303 unsigned HOST_WIDE_INT cst;
8305 cst = tree_low_cst (and1, 1);
8306 cst &= (HOST_WIDE_INT) -1
8307 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8308 change = (cst == 0);
8309 #ifdef LOAD_EXTEND_OP
8310 if (change
8311 && !flag_syntax_only
8312 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8313 == ZERO_EXTEND))
8315 tree uns = unsigned_type_for (TREE_TYPE (and0));
8316 and0 = fold_convert (uns, and0);
8317 and1 = fold_convert (uns, and1);
8319 #endif
8321 if (change)
8323 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8324 TREE_INT_CST_HIGH (and1), 0,
8325 TREE_OVERFLOW (and1));
8326 return fold_build2 (BIT_AND_EXPR, type,
8327 fold_convert (type, and0), tem);
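/* Editorial illustration, not part of the original file: for
   "signed char s", the tree for (int)(s & 0x7f) becomes
   (int)s & 0x7f; the fold is safe because the constant clears the
   sign bit of the narrow type (the cst check above), so extending
   before masking cannot change the result. */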
8331 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8332 when one of the new casts will fold away. Conservatively we assume
8333 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8334 if (POINTER_TYPE_P (type)
8335 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8336 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8337 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8338 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8340 tree arg00 = TREE_OPERAND (arg0, 0);
8341 tree arg01 = TREE_OPERAND (arg0, 1);
8343 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8344 fold_convert (sizetype, arg01));
8347 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8348 of the same precision, and X is an integer type not narrower than
8349 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8350 if (INTEGRAL_TYPE_P (type)
8351 && TREE_CODE (op0) == BIT_NOT_EXPR
8352 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8353 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8354 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8356 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8357 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8358 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8359 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8362 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8363 type of X and Y (integer types only). */
8364 if (INTEGRAL_TYPE_P (type)
8365 && TREE_CODE (op0) == MULT_EXPR
8366 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8367 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8369 /* Be careful not to introduce new overflows. */
8370 tree mult_type;
8371 if (TYPE_OVERFLOW_WRAPS (type))
8372 mult_type = type;
8373 else
8374 mult_type = unsigned_type_for (type);
8376 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8378 tem = fold_build2 (MULT_EXPR, mult_type,
8379 fold_convert (mult_type,
8380 TREE_OPERAND (op0, 0)),
8381 fold_convert (mult_type,
8382 TREE_OPERAND (op0, 1)));
8383 return fold_convert (type, tem);
8387 tem = fold_convert_const (code, type, op0);
8388 return tem ? tem : NULL_TREE;
8390 case FIXED_CONVERT_EXPR:
8391 tem = fold_convert_const (code, type, arg0);
8392 return tem ? tem : NULL_TREE;
8394 case VIEW_CONVERT_EXPR:
8395 if (TREE_TYPE (op0) == type)
8396 return op0;
8397 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8398 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8400 /* For integral conversions with the same precision or pointer
8401 conversions use a NOP_EXPR instead. */
8402 if ((INTEGRAL_TYPE_P (type)
8403 || POINTER_TYPE_P (type))
8404 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8405 || POINTER_TYPE_P (TREE_TYPE (op0)))
8406 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8407 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8408 a sub-type to its base type as generated by the Ada FE. */
8409 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8410 && TREE_TYPE (TREE_TYPE (op0))))
8411 return fold_convert (type, op0);
8413 /* Strip inner integral conversions that do not change the precision. */
8414 if (CONVERT_EXPR_P (op0)
8415 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8416 || POINTER_TYPE_P (TREE_TYPE (op0)))
8417 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8418 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8419 && (TYPE_PRECISION (TREE_TYPE (op0))
8420 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8421 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8423 return fold_view_convert_expr (type, op0);
8425 case NEGATE_EXPR:
8426 tem = fold_negate_expr (arg0);
8427 if (tem)
8428 return fold_convert (type, tem);
8429 return NULL_TREE;
8431 case ABS_EXPR:
8432 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8433 return fold_abs_const (arg0, type);
8434 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8435 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8436 /* Convert fabs((double)float) into (double)fabsf(float). */
8437 else if (TREE_CODE (arg0) == NOP_EXPR
8438 && TREE_CODE (type) == REAL_TYPE)
8440 tree targ0 = strip_float_extensions (arg0);
8441 if (targ0 != arg0)
8442 return fold_convert (type, fold_build1 (ABS_EXPR,
8443 TREE_TYPE (targ0),
8444 targ0));
8446 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8447 else if (TREE_CODE (arg0) == ABS_EXPR)
8448 return arg0;
8449 else if (tree_expr_nonnegative_p (arg0))
8450 return arg0;
8452 /* Strip sign ops from argument. */
8453 if (TREE_CODE (type) == REAL_TYPE)
8455 tem = fold_strip_sign_ops (arg0);
8456 if (tem)
8457 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8459 return NULL_TREE;
8461 case CONJ_EXPR:
8462 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8463 return fold_convert (type, arg0);
8464 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8466 tree itype = TREE_TYPE (type);
8467 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8468 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8469 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8471 if (TREE_CODE (arg0) == COMPLEX_CST)
8473 tree itype = TREE_TYPE (type);
8474 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8475 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8476 return build_complex (type, rpart, negate_expr (ipart));
8478 if (TREE_CODE (arg0) == CONJ_EXPR)
8479 return fold_convert (type, TREE_OPERAND (arg0, 0));
8480 return NULL_TREE;
8482 case BIT_NOT_EXPR:
8483 if (TREE_CODE (arg0) == INTEGER_CST)
8484 return fold_not_const (arg0, type);
8485 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8486 return fold_convert (type, TREE_OPERAND (arg0, 0));
8487 /* Convert ~ (-A) to A - 1. */
8488 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8489 return fold_build2 (MINUS_EXPR, type,
8490 fold_convert (type, TREE_OPERAND (arg0, 0)),
8491 build_int_cst (type, 1));
8492 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8493 else if (INTEGRAL_TYPE_P (type)
8494 && ((TREE_CODE (arg0) == MINUS_EXPR
8495 && integer_onep (TREE_OPERAND (arg0, 1)))
8496 || (TREE_CODE (arg0) == PLUS_EXPR
8497 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8498 return fold_build1 (NEGATE_EXPR, type,
8499 fold_convert (type, TREE_OPERAND (arg0, 0)));
8500 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8501 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8502 && (tem = fold_unary (BIT_NOT_EXPR, type,
8503 fold_convert (type,
8504 TREE_OPERAND (arg0, 0)))))
8505 return fold_build2 (BIT_XOR_EXPR, type, tem,
8506 fold_convert (type, TREE_OPERAND (arg0, 1)));
8507 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8508 && (tem = fold_unary (BIT_NOT_EXPR, type,
8509 fold_convert (type,
8510 TREE_OPERAND (arg0, 1)))))
8511 return fold_build2 (BIT_XOR_EXPR, type,
8512 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8513 /* Perform BIT_NOT_EXPR on each element individually. */
8514 else if (TREE_CODE (arg0) == VECTOR_CST)
8516 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8517 int count = TYPE_VECTOR_SUBPARTS (type), i;
8519 for (i = 0; i < count; i++)
8521 if (elements)
8523 elem = TREE_VALUE (elements);
8524 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8525 if (elem == NULL_TREE)
8526 break;
8527 elements = TREE_CHAIN (elements);
8529 else
8530 elem = build_int_cst (TREE_TYPE (type), -1);
8531 list = tree_cons (NULL_TREE, elem, list);
8533 if (i == count)
8534 return build_vector (type, nreverse (list));
8537 return NULL_TREE;
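/* Editorial note, not part of the original file: the BIT_NOT_EXPR
   folds above all follow from the two's complement identity
   ~X == -X - 1; substituting X = -A gives ~(-A) == A - 1, and
   substituting X = A - 1 gives ~(A - 1) == -A. */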
8539 case TRUTH_NOT_EXPR:
8540 /* The argument to invert_truthvalue must have Boolean type. */
8541 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8542 arg0 = fold_convert (boolean_type_node, arg0);
8544 /* Note that the operand of this must be an int
8545 and its values must be 0 or 1.
8546 ("true" is a fixed value perhaps depending on the language,
8547 but we don't handle values other than 1 correctly yet.) */
8548 tem = fold_truth_not_expr (arg0);
8549 if (!tem)
8550 return NULL_TREE;
8551 return fold_convert (type, tem);
8553 case REALPART_EXPR:
8554 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8555 return fold_convert (type, arg0);
8556 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8557 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8558 TREE_OPERAND (arg0, 1));
8559 if (TREE_CODE (arg0) == COMPLEX_CST)
8560 return fold_convert (type, TREE_REALPART (arg0));
8561 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8563 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8564 tem = fold_build2 (TREE_CODE (arg0), itype,
8565 fold_build1 (REALPART_EXPR, itype,
8566 TREE_OPERAND (arg0, 0)),
8567 fold_build1 (REALPART_EXPR, itype,
8568 TREE_OPERAND (arg0, 1)));
8569 return fold_convert (type, tem);
8571 if (TREE_CODE (arg0) == CONJ_EXPR)
8573 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8574 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8575 return fold_convert (type, tem);
8577 if (TREE_CODE (arg0) == CALL_EXPR)
8579 tree fn = get_callee_fndecl (arg0);
8580 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8581 switch (DECL_FUNCTION_CODE (fn))
8583 CASE_FLT_FN (BUILT_IN_CEXPI):
8584 fn = mathfn_built_in (type, BUILT_IN_COS);
8585 if (fn)
8586 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8587 break;
8589 default:
8590 break;
8593 return NULL_TREE;
8595 case IMAGPART_EXPR:
8596 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8597 return fold_convert (type, integer_zero_node);
8598 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8599 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8600 TREE_OPERAND (arg0, 0));
8601 if (TREE_CODE (arg0) == COMPLEX_CST)
8602 return fold_convert (type, TREE_IMAGPART (arg0));
8603 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8605 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8606 tem = fold_build2 (TREE_CODE (arg0), itype,
8607 fold_build1 (IMAGPART_EXPR, itype,
8608 TREE_OPERAND (arg0, 0)),
8609 fold_build1 (IMAGPART_EXPR, itype,
8610 TREE_OPERAND (arg0, 1)));
8611 return fold_convert (type, tem);
8613 if (TREE_CODE (arg0) == CONJ_EXPR)
8615 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8616 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8617 return fold_convert (type, negate_expr (tem));
8619 if (TREE_CODE (arg0) == CALL_EXPR)
8621 tree fn = get_callee_fndecl (arg0);
8622 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8623 switch (DECL_FUNCTION_CODE (fn))
8625 CASE_FLT_FN (BUILT_IN_CEXPI):
8626 fn = mathfn_built_in (type, BUILT_IN_SIN);
8627 if (fn)
8628 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8629 break;
8631 default:
8632 break;
8635 return NULL_TREE;
8637 default:
8638 return NULL_TREE;
8639 } /* switch (code) */
8643 /* If the operation was a conversion, do _not_ mark a resulting constant
8644 with TREE_OVERFLOW if the original constant was not. These conversions
8645 have implementation-defined behavior and retaining the TREE_OVERFLOW
8646 flag here would confuse later passes such as VRP. */
8647 tree
8648 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8650 tree res = fold_unary (code, type, op0);
8651 if (res
8652 && TREE_CODE (res) == INTEGER_CST
8653 && TREE_CODE (op0) == INTEGER_CST
8654 && CONVERT_EXPR_CODE_P (code))
8655 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8657 return res;
8660 /* Fold a binary expression of code CODE and type TYPE with operands
8661 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8662 Return the folded expression if folding is successful. Otherwise,
8663 return NULL_TREE. */
8665 static tree
8666 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8668 enum tree_code compl_code;
8670 if (code == MIN_EXPR)
8671 compl_code = MAX_EXPR;
8672 else if (code == MAX_EXPR)
8673 compl_code = MIN_EXPR;
8674 else
8675 gcc_unreachable ();
8677 /* MIN (MAX (a, b), b) == b. */
8678 if (TREE_CODE (op0) == compl_code
8679 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8680 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8682 /* MIN (MAX (b, a), b) == b. */
8683 if (TREE_CODE (op0) == compl_code
8684 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8685 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8686 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8688 /* MIN (a, MAX (a, b)) == a. */
8689 if (TREE_CODE (op1) == compl_code
8690 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8691 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8692 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8694 /* MIN (a, MAX (b, a)) == a. */
8695 if (TREE_CODE (op1) == compl_code
8696 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8697 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8698 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8700 return NULL_TREE;
8703 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8704 by changing CODE to reduce the magnitude of constants involved in
8705 ARG0 of the comparison.
8706 Returns a canonicalized comparison tree if a simplification was
8707 possible, otherwise returns NULL_TREE.
8708 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8709 valid if signed overflow is undefined. */
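/* Editorial illustration, not part of the original file: with signed
   overflow undefined, "a - 2 < b" is canonicalized to "a - 1 <= b",
   shrinking the constant's magnitude; the constant-only form
   "3 <= b" becomes "2 < b" and is then swapped into "b > 2". */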
8711 static tree
8712 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8713 tree arg0, tree arg1,
8714 bool *strict_overflow_p)
8716 enum tree_code code0 = TREE_CODE (arg0);
8717 tree t, cst0 = NULL_TREE;
8718 int sgn0;
8719 bool swap = false;
8721 /* Match A +- CST code arg1 and CST code arg1. We can change the
8722 first form only if overflow is undefined. */
8723 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8724 /* In principle pointers also have undefined overflow behavior,
8725 but that causes problems elsewhere. */
8726 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8727 && (code0 == MINUS_EXPR
8728 || code0 == PLUS_EXPR)
8729 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8730 || code0 == INTEGER_CST))
8731 return NULL_TREE;
8733 /* Identify the constant in arg0 and its sign. */
8734 if (code0 == INTEGER_CST)
8735 cst0 = arg0;
8736 else
8737 cst0 = TREE_OPERAND (arg0, 1);
8738 sgn0 = tree_int_cst_sgn (cst0);
8740 /* Overflowed constants and zero will cause problems. */
8741 if (integer_zerop (cst0)
8742 || TREE_OVERFLOW (cst0))
8743 return NULL_TREE;
8745 /* See if we can reduce the magnitude of the constant in
8746 arg0 by changing the comparison code. */
8747 if (code0 == INTEGER_CST)
8749 /* CST <= arg1 -> CST-1 < arg1. */
8750 if (code == LE_EXPR && sgn0 == 1)
8751 code = LT_EXPR;
8752 /* -CST < arg1 -> -CST-1 <= arg1. */
8753 else if (code == LT_EXPR && sgn0 == -1)
8754 code = LE_EXPR;
8755 /* CST > arg1 -> CST-1 >= arg1. */
8756 else if (code == GT_EXPR && sgn0 == 1)
8757 code = GE_EXPR;
8758 /* -CST >= arg1 -> -CST-1 > arg1. */
8759 else if (code == GE_EXPR && sgn0 == -1)
8760 code = GT_EXPR;
8761 else
8762 return NULL_TREE;
8763 /* arg1 code' CST' might be more canonical. */
8764 swap = true;
8766 else
8768 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8769 if (code == LT_EXPR
8770 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8771 code = LE_EXPR;
8772 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8773 else if (code == GT_EXPR
8774 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8775 code = GE_EXPR;
8776 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8777 else if (code == LE_EXPR
8778 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8779 code = LT_EXPR;
8780 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8781 else if (code == GE_EXPR
8782 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8783 code = GT_EXPR;
8784 else
8785 return NULL_TREE;
8786 *strict_overflow_p = true;
8789 /* Now build the constant reduced in magnitude. But not if that
8790 would produce one outside of its type's range. */
8791 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8792 && ((sgn0 == 1
8793 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8794 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8795 || (sgn0 == -1
8796 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8797 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8798 /* We cannot swap the comparison here as that would cause us to
8799 endlessly recurse. */
8800 return NULL_TREE;
8802 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8803 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8804 if (code0 != INTEGER_CST)
8805 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8807 /* If swapping might yield a more canonical form, do so. */
8808 if (swap)
8809 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8810 else
8811 return fold_build2 (code, type, t, arg1);
8814 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8815 overflow further. Try to decrease the magnitude of constants involved
8816 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8817 and put sole constants at the second argument position.
8818 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8820 static tree
8821 maybe_canonicalize_comparison (enum tree_code code, tree type,
8822 tree arg0, tree arg1)
8824 tree t;
8825 bool strict_overflow_p;
8826 const char * const warnmsg = G_("assuming signed overflow does not occur "
8827 "when reducing constant in comparison");
8829 /* Try canonicalization by simplifying arg0. */
8830 strict_overflow_p = false;
8831 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8832 &strict_overflow_p);
8833 if (t)
8835 if (strict_overflow_p)
8836 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8837 return t;
8840 /* Try canonicalization by simplifying arg1 using the swapped
8841 comparison. */
8842 code = swap_tree_comparison (code);
8843 strict_overflow_p = false;
8844 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8845 &strict_overflow_p);
8846 if (t && strict_overflow_p)
8847 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8848 return t;
8851 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8852 space. This is used to avoid issuing overflow warnings for
8853 expressions like &p->x, which cannot wrap. */
8855 static bool
8856 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8858 unsigned HOST_WIDE_INT offset_low, total_low;
8859 HOST_WIDE_INT size, offset_high, total_high;
8861 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8862 return true;
8864 if (bitpos < 0)
8865 return true;
8867 if (offset == NULL_TREE)
8869 offset_low = 0;
8870 offset_high = 0;
8872 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8873 return true;
8874 else
8876 offset_low = TREE_INT_CST_LOW (offset);
8877 offset_high = TREE_INT_CST_HIGH (offset);
8880 if (add_double_with_sign (offset_low, offset_high,
8881 bitpos / BITS_PER_UNIT, 0,
8882 &total_low, &total_high,
8883 true))
8884 return true;
8886 if (total_high != 0)
8887 return true;
8889 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8890 if (size <= 0)
8891 return true;
8893 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8894 array. */
8895 if (TREE_CODE (base) == ADDR_EXPR)
8897 HOST_WIDE_INT base_size;
8899 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8900 if (base_size > 0 && size < base_size)
8901 size = base_size;
8904 return total_low > (unsigned HOST_WIDE_INT) size;
8907 /* Subroutine of fold_binary. This routine performs all of the
8908 transformations that are common to the equality/inequality
8909 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8910 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8911 fold_binary should call fold_binary. Fold a comparison with
8912 tree code CODE and type TYPE with operands OP0 and OP1. Return
8913 the folded comparison or NULL_TREE. */
8915 static tree
8916 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8918 tree arg0, arg1, tem;
8920 arg0 = op0;
8921 arg1 = op1;
8923 STRIP_SIGN_NOPS (arg0);
8924 STRIP_SIGN_NOPS (arg1);
8926 tem = fold_relational_const (code, type, arg0, arg1);
8927 if (tem != NULL_TREE)
8928 return tem;
8930 /* If one arg is a real or integer constant, put it last. */
8931 if (tree_swap_operands_p (arg0, arg1, true))
8932 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8934 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8935 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8936 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8937 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8938 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8939 && (TREE_CODE (arg1) == INTEGER_CST
8940 && !TREE_OVERFLOW (arg1)))
8942 tree const1 = TREE_OPERAND (arg0, 1);
8943 tree const2 = arg1;
8944 tree variable = TREE_OPERAND (arg0, 0);
8945 tree lhs;
8946 int lhs_add;
8947 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8949 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8950 TREE_TYPE (arg1), const2, const1);
8952 /* If the constant operation overflowed this can be
8953 simplified as a comparison against INT_MAX/INT_MIN. */
8954 if (TREE_CODE (lhs) == INTEGER_CST
8955 && TREE_OVERFLOW (lhs))
8957 int const1_sgn = tree_int_cst_sgn (const1);
8958 enum tree_code code2 = code;
8960 /* Get the sign of the constant on the lhs if the
8961 operation were VARIABLE + CONST1. */
8962 if (TREE_CODE (arg0) == MINUS_EXPR)
8963 const1_sgn = -const1_sgn;
8965 /* The sign of the constant determines if we overflowed
8966 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8967 Canonicalize to the INT_MIN overflow by swapping the comparison
8968 if necessary. */
8969 if (const1_sgn == -1)
8970 code2 = swap_tree_comparison (code);
8972 /* We can now look at the canonicalized case
8973 VARIABLE + 1 CODE2 INT_MIN
8974 and decide on the result. */
8975 if (code2 == LT_EXPR
8976 || code2 == LE_EXPR
8977 || code2 == EQ_EXPR)
8978 return omit_one_operand (type, boolean_false_node, variable);
8979 else if (code2 == NE_EXPR
8980 || code2 == GE_EXPR
8981 || code2 == GT_EXPR)
8982 return omit_one_operand (type, boolean_true_node, variable);
8985 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8986 && (TREE_CODE (lhs) != INTEGER_CST
8987 || !TREE_OVERFLOW (lhs)))
8989 fold_overflow_warning (("assuming signed overflow does not occur "
8990 "when changing X +- C1 cmp C2 to "
8991 "X cmp C1 +- C2"),
8992 WARN_STRICT_OVERFLOW_COMPARISON);
8993 return fold_build2 (code, type, variable, lhs);
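/* Editorial illustration, not part of the original file: for signed
   x, "x + 3 < 10" is rewritten to "x < 7" (C2 - C1); if the
   combined constant overflows, as in "x + 1 < INT_MIN", the
   comparison instead degenerates to a constant (here false). */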
8997 /* For comparisons of pointers we can decompose it to a compile time
8998 comparison of the base objects and the offsets into the object.
8999 This requires at least one operand being an ADDR_EXPR or a
9000 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
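/* Editorial illustration, not part of the original file, assuming
   32-bit int: for "int a[10]", the comparison "&a[2] < &a[5]"
   decomposes to the common base "a" with constant bit positions 64
   and 160, so with pointer overflow undefined it folds to constant
   true. */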
9001 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9002 && (TREE_CODE (arg0) == ADDR_EXPR
9003 || TREE_CODE (arg1) == ADDR_EXPR
9004 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9005 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9007 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9008 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9009 enum machine_mode mode;
9010 int volatilep, unsignedp;
9011 bool indirect_base0 = false, indirect_base1 = false;
9013 /* Get base and offset for the access. Strip ADDR_EXPR for
9014 get_inner_reference, but put it back by stripping INDIRECT_REF
9015 off the base object if possible. indirect_baseN will be true
9016 if baseN is not an address but refers to the object itself. */
9017 base0 = arg0;
9018 if (TREE_CODE (arg0) == ADDR_EXPR)
9020 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9021 &bitsize, &bitpos0, &offset0, &mode,
9022 &unsignedp, &volatilep, false);
9023 if (TREE_CODE (base0) == INDIRECT_REF)
9024 base0 = TREE_OPERAND (base0, 0);
9025 else
9026 indirect_base0 = true;
9028 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9030 base0 = TREE_OPERAND (arg0, 0);
9031 offset0 = TREE_OPERAND (arg0, 1);
9034 base1 = arg1;
9035 if (TREE_CODE (arg1) == ADDR_EXPR)
9037 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9038 &bitsize, &bitpos1, &offset1, &mode,
9039 &unsignedp, &volatilep, false);
9040 if (TREE_CODE (base1) == INDIRECT_REF)
9041 base1 = TREE_OPERAND (base1, 0);
9042 else
9043 indirect_base1 = true;
9045 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9047 base1 = TREE_OPERAND (arg1, 0);
9048 offset1 = TREE_OPERAND (arg1, 1);
9051 /* If we have equivalent bases we might be able to simplify. */
9052 if (indirect_base0 == indirect_base1
9053 && operand_equal_p (base0, base1, 0))
9055 /* We can fold this expression to a constant if the non-constant
9056 offset parts are equal. */
9057 if ((offset0 == offset1
9058 || (offset0 && offset1
9059 && operand_equal_p (offset0, offset1, 0)))
9060 && (code == EQ_EXPR
9061 || code == NE_EXPR
9062 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9065 if (code != EQ_EXPR
9066 && code != NE_EXPR
9067 && bitpos0 != bitpos1
9068 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9069 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9070 fold_overflow_warning (("assuming pointer wraparound does not "
9071 "occur when comparing P +- C1 with "
9072 "P +- C2"),
9073 WARN_STRICT_OVERFLOW_CONDITIONAL);
9075 switch (code)
9077 case EQ_EXPR:
9078 return constant_boolean_node (bitpos0 == bitpos1, type);
9079 case NE_EXPR:
9080 return constant_boolean_node (bitpos0 != bitpos1, type);
9081 case LT_EXPR:
9082 return constant_boolean_node (bitpos0 < bitpos1, type);
9083 case LE_EXPR:
9084 return constant_boolean_node (bitpos0 <= bitpos1, type);
9085 case GE_EXPR:
9086 return constant_boolean_node (bitpos0 >= bitpos1, type);
9087 case GT_EXPR:
9088 return constant_boolean_node (bitpos0 > bitpos1, type);
9089 default:;
9092 /* We can simplify the comparison to a comparison of the variable
9093 offset parts if the constant offset parts are equal.
9094 Be careful to use signed size type here because otherwise we
9095 mess with array offsets in the wrong way. This is possible
9096 because pointer arithmetic is restricted to remain within an
9097 object and overflow on pointer differences is undefined as of
9098 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9099 else if (bitpos0 == bitpos1
9100 && ((code == EQ_EXPR || code == NE_EXPR)
9101 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9103 tree signed_size_type_node;
9104 signed_size_type_node = signed_type_for (size_type_node);
9106 /* By converting to the signed size type we cover middle-end
9107 pointer arithmetic, which operates on unsigned types of
9108 size-type width, and ARRAY_REF offsets, which are properly
9109 sign- or zero-extended from their type when it is narrower
9110 than the size type. */
9111 if (offset0 == NULL_TREE)
9112 offset0 = build_int_cst (signed_size_type_node, 0);
9113 else
9114 offset0 = fold_convert (signed_size_type_node, offset0);
9115 if (offset1 == NULL_TREE)
9116 offset1 = build_int_cst (signed_size_type_node, 0);
9117 else
9118 offset1 = fold_convert (signed_size_type_node, offset1);
9120 if (code != EQ_EXPR
9121 && code != NE_EXPR
9122 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9123 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9124 fold_overflow_warning (("assuming pointer wraparound does not "
9125 "occur when comparing P +- C1 with "
9126 "P +- C2"),
9127 WARN_STRICT_OVERFLOW_COMPARISON);
9129 return fold_build2 (code, type, offset0, offset1);
9132 /* For non-equal bases we can simplify if they are addresses
9133 of local binding decls or constants. */
9134 else if (indirect_base0 && indirect_base1
9135 /* We know that !operand_equal_p (base0, base1, 0)
9136 because the if condition was false. But make
9137 sure two decls are not the same. */
9138 && base0 != base1
9139 && TREE_CODE (arg0) == ADDR_EXPR
9140 && TREE_CODE (arg1) == ADDR_EXPR
9141 && (((TREE_CODE (base0) == VAR_DECL
9142 || TREE_CODE (base0) == PARM_DECL)
9143 && (targetm.binds_local_p (base0)
9144 || CONSTANT_CLASS_P (base1)))
9145 || CONSTANT_CLASS_P (base0))
9146 && (((TREE_CODE (base1) == VAR_DECL
9147 || TREE_CODE (base1) == PARM_DECL)
9148 && (targetm.binds_local_p (base1)
9149 || CONSTANT_CLASS_P (base0)))
9150 || CONSTANT_CLASS_P (base1)))
9152 if (code == EQ_EXPR)
9153 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9154 else if (code == NE_EXPR)
9155 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9157 /* For equal offsets we can simplify to a comparison of the
9158 base addresses. */
9159 else if (bitpos0 == bitpos1
9160 && (indirect_base0
9161 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9162 && (indirect_base1
9163 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9164 && ((offset0 == offset1)
9165 || (offset0 && offset1
9166 && operand_equal_p (offset0, offset1, 0))))
9168 if (indirect_base0)
9169 base0 = build_fold_addr_expr (base0);
9170 if (indirect_base1)
9171 base1 = build_fold_addr_expr (base1);
9172 return fold_build2 (code, type, base0, base1);
9176 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9177 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9178 the resulting offset is smaller in absolute value than the
9179 original one. */
9180 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9181 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9182 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9183 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9184 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9185 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9186 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9188 tree const1 = TREE_OPERAND (arg0, 1);
9189 tree const2 = TREE_OPERAND (arg1, 1);
9190 tree variable1 = TREE_OPERAND (arg0, 0);
9191 tree variable2 = TREE_OPERAND (arg1, 0);
9192 tree cst;
9193 const char * const warnmsg = G_("assuming signed overflow does not "
9194 "occur when combining constants around "
9195 "a comparison");
9197 /* Put the constant on the side where it doesn't overflow and is
9198 of lower absolute value than before. */
9199 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9200 ? MINUS_EXPR : PLUS_EXPR,
9201 const2, const1, 0);
9202 if (!TREE_OVERFLOW (cst)
9203 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9205 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9206 return fold_build2 (code, type,
9207 variable1,
9208 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9209 variable2, cst));
9212 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9213 ? MINUS_EXPR : PLUS_EXPR,
9214 const1, const2, 0);
9215 if (!TREE_OVERFLOW (cst)
9216 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9218 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9219 return fold_build2 (code, type,
9220 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9221 variable1, cst),
9222 variable2);
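         /* For instance, "x + 4 < y + 7" becomes "x < y + 3" when signed
            overflow is undefined: the combined constant 3 is smaller in
            absolute value than 7, so no new overflow is introduced.  */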
9226 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9227 signed arithmetic case. That form is created by the compiler
9228 often enough for folding it to be of value. One example is in
9229 computing loop trip counts after Operator Strength Reduction. */
9230 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9231 && TREE_CODE (arg0) == MULT_EXPR
9232 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9233 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9234 && integer_zerop (arg1))
9236 tree const1 = TREE_OPERAND (arg0, 1);
9237 tree const2 = arg1; /* zero */
9238 tree variable1 = TREE_OPERAND (arg0, 0);
9239 enum tree_code cmp_code = code;
9241 gcc_assert (!integer_zerop (const1));
9243 fold_overflow_warning (("assuming signed overflow does not occur when "
9244 "eliminating multiplication in comparison "
9245 "with zero"),
9246 WARN_STRICT_OVERFLOW_COMPARISON);
9248 /* If const1 is negative we swap the sense of the comparison. */
9249 if (tree_int_cst_sgn (const1) < 0)
9250 cmp_code = swap_tree_comparison (cmp_code);
9252 return fold_build2 (cmp_code, type, variable1, const2);
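         /* E.g. "x * 4 < 0" folds to "x < 0", while "x * -4 < 0" folds
            to "x > 0" because the negative multiplier swaps the
            comparison.  */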
9255 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9256 if (tem)
9257 return tem;
9259 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9261 tree targ0 = strip_float_extensions (arg0);
9262 tree targ1 = strip_float_extensions (arg1);
9263 tree newtype = TREE_TYPE (targ0);
9265 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9266 newtype = TREE_TYPE (targ1);
9268 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9269 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9270 return fold_build2 (code, type, fold_convert (newtype, targ0),
9271 fold_convert (newtype, targ1));
9273 /* (-a) CMP (-b) -> b CMP a */
9274 if (TREE_CODE (arg0) == NEGATE_EXPR
9275 && TREE_CODE (arg1) == NEGATE_EXPR)
9276 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9277 TREE_OPERAND (arg0, 0));
9279 if (TREE_CODE (arg1) == REAL_CST)
9281 REAL_VALUE_TYPE cst;
9282 cst = TREE_REAL_CST (arg1);
9284 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9285 if (TREE_CODE (arg0) == NEGATE_EXPR)
9286 return fold_build2 (swap_tree_comparison (code), type,
9287 TREE_OPERAND (arg0, 0),
9288 build_real (TREE_TYPE (arg1),
9289 REAL_VALUE_NEGATE (cst)));
9291 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9292 /* a CMP (-0) -> a CMP 0 */
9293 if (REAL_VALUE_MINUS_ZERO (cst))
9294 return fold_build2 (code, type, arg0,
9295 build_real (TREE_TYPE (arg1), dconst0));
9297 /* x != NaN is always true, other ops are always false. */
9298 if (REAL_VALUE_ISNAN (cst)
9299 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9301 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9302 return omit_one_operand (type, tem, arg0);
9305 /* Fold comparisons against infinity. */
9306 if (REAL_VALUE_ISINF (cst)
9307 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9309 tem = fold_inf_compare (code, type, arg0, arg1);
9310 if (tem != NULL_TREE)
9311 return tem;
9315 /* If this is a comparison of a real constant with a PLUS_EXPR
9316 or a MINUS_EXPR of a real constant, we can convert it into a
9317 comparison with a revised real constant as long as no overflow
9318 occurs when unsafe_math_optimizations are enabled. */
9319 if (flag_unsafe_math_optimizations
9320 && TREE_CODE (arg1) == REAL_CST
9321 && (TREE_CODE (arg0) == PLUS_EXPR
9322 || TREE_CODE (arg0) == MINUS_EXPR)
9323 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9324 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9325 ? MINUS_EXPR : PLUS_EXPR,
9326 arg1, TREE_OPERAND (arg0, 1), 0))
9327 && !TREE_OVERFLOW (tem))
9328 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9330 /* Likewise, we can simplify a comparison of a real constant with
9331 a MINUS_EXPR whose first operand is also a real constant, i.e.
9332 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9333 floating-point types only if -fassociative-math is set. */
9334 if (flag_associative_math
9335 && TREE_CODE (arg1) == REAL_CST
9336 && TREE_CODE (arg0) == MINUS_EXPR
9337 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9338 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9339 arg1, 0))
9340 && !TREE_OVERFLOW (tem))
9341 return fold_build2 (swap_tree_comparison (code), type,
9342 TREE_OPERAND (arg0, 1), tem);
9344 /* Fold comparisons against built-in math functions. */
9345 if (TREE_CODE (arg1) == REAL_CST
9346 && flag_unsafe_math_optimizations
9347 && ! flag_errno_math)
9349 enum built_in_function fcode = builtin_mathfn_code (arg0);
9351 if (fcode != END_BUILTINS)
9353 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9354 if (tem != NULL_TREE)
9355 return tem;
9360 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9361 && CONVERT_EXPR_P (arg0))
9363 /* If we are widening one operand of an integer comparison,
9364 see if the other operand is similarly being widened. Perhaps we
9365 can do the comparison in the narrower type. */
9366 tem = fold_widened_comparison (code, type, arg0, arg1);
9367 if (tem)
9368 return tem;
9370 /* Or if we are changing signedness. */
9371 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9372 if (tem)
9373 return tem;
9376 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9377 constant, we can simplify it. */
9378 if (TREE_CODE (arg1) == INTEGER_CST
9379 && (TREE_CODE (arg0) == MIN_EXPR
9380 || TREE_CODE (arg0) == MAX_EXPR)
9381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9383 tem = optimize_minmax_comparison (code, type, op0, op1);
9384 if (tem)
9385 return tem;
9388 /* Simplify comparison of something with itself. (For IEEE
9389 floating-point, we can only do some of these simplifications.) */
9390 if (operand_equal_p (arg0, arg1, 0))
9392 switch (code)
9394 case EQ_EXPR:
9395 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9396 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9397 return constant_boolean_node (1, type);
9398 break;
9400 case GE_EXPR:
9401 case LE_EXPR:
9402 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9403 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9404 return constant_boolean_node (1, type);
9405 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9407 case NE_EXPR:
9408 /* For NE, we can only do this simplification if integer
9409 or we don't honor IEEE floating point NaNs. */
9410 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9411 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9412 break;
9413 /* ... fall through ... */
9414 case GT_EXPR:
9415 case LT_EXPR:
9416 return constant_boolean_node (0, type);
9417 default:
9418 gcc_unreachable ();
9422 /* If we are comparing an expression that just has comparisons
9423 of two integer values, arithmetic expressions of those comparisons,
9424 and constants, we can simplify it. There are only three cases
9425 to check: the two values can either be equal, the first can be
9426 greater, or the second can be greater. Fold the expression for
9427 those three values. Since each value must be 0 or 1, we have
9428 eight possibilities, each of which corresponds to the constant 0
9429 or 1 or one of the six possible comparisons.
9431 This handles common cases like (a > b) == 0 but also handles
9432 expressions like ((x > y) - (y > x)) > 0, which supposedly
9433 occur in macroized code. */
9435 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9437 tree cval1 = 0, cval2 = 0;
9438 int save_p = 0;
9440 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9441 /* Don't handle degenerate cases here; they should already
9442 have been handled anyway. */
9443 && cval1 != 0 && cval2 != 0
9444 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9445 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9446 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9447 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9448 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9449 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9450 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9452 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9453 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9455 /* We can't just pass T to eval_subst in case cval1 or cval2
9456 was the same as ARG1. */
9458 tree high_result
9459 = fold_build2 (code, type,
9460 eval_subst (arg0, cval1, maxval,
9461 cval2, minval),
9462 arg1);
9463 tree equal_result
9464 = fold_build2 (code, type,
9465 eval_subst (arg0, cval1, maxval,
9466 cval2, maxval),
9467 arg1);
9468 tree low_result
9469 = fold_build2 (code, type,
9470 eval_subst (arg0, cval1, minval,
9471 cval2, maxval),
9472 arg1);
9474 /* All three of these results should be 0 or 1. Confirm they are.
9475 Then use those values to select the proper code to use. */
9477 if (TREE_CODE (high_result) == INTEGER_CST
9478 && TREE_CODE (equal_result) == INTEGER_CST
9479 && TREE_CODE (low_result) == INTEGER_CST)
9481 /* Make a 3-bit mask with the high-order bit being the
9482 value for `>', the next for '=', and the low for '<'. */
9483 switch ((integer_onep (high_result) * 4)
9484 + (integer_onep (equal_result) * 2)
9485 + integer_onep (low_result))
9487 case 0:
9488 /* Always false. */
9489 return omit_one_operand (type, integer_zero_node, arg0);
9490 case 1:
9491 code = LT_EXPR;
9492 break;
9493 case 2:
9494 code = EQ_EXPR;
9495 break;
9496 case 3:
9497 code = LE_EXPR;
9498 break;
9499 case 4:
9500 code = GT_EXPR;
9501 break;
9502 case 5:
9503 code = NE_EXPR;
9504 break;
9505 case 6:
9506 code = GE_EXPR;
9507 break;
9508 case 7:
9509 /* Always true. */
9510 return omit_one_operand (type, integer_one_node, arg0);
9513 if (save_p)
9514 return save_expr (build2 (code, type, cval1, cval2));
9515 return fold_build2 (code, type, cval1, cval2);
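           /* As an illustration, for "((x > y) - (y > x)) > 0" the three
              substitutions yield 1, 0 and 0, giving mask 4 (GT_EXPR), so
              the whole expression folds to roughly "x > y".  */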
9520 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9521 into a single range test. */
9522 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9523 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9524 && TREE_CODE (arg1) == INTEGER_CST
9525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9526 && !integer_zerop (TREE_OPERAND (arg0, 1))
9527 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9528 && !TREE_OVERFLOW (arg1))
9530 tem = fold_div_compare (code, type, arg0, arg1);
9531 if (tem != NULL_TREE)
9532 return tem;
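       /* E.g. for unsigned x, "x / 4 == 2" becomes a range test that
          holds exactly when 8 <= x && x <= 11.  */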
9535 /* Fold ~X op ~Y as Y op X. */
9536 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9537 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9539 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9540 return fold_build2 (code, type,
9541 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9542 TREE_OPERAND (arg0, 0));
9545 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9546 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9547 && TREE_CODE (arg1) == INTEGER_CST)
9549 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9550 return fold_build2 (swap_tree_comparison (code), type,
9551 TREE_OPERAND (arg0, 0),
9552 fold_build1 (BIT_NOT_EXPR, cmp_type,
9553 fold_convert (cmp_type, arg1)));
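       /* E.g. "~x < 5" folds to "x > ~5", i.e. "x > -6" for signed x.  */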
9556 return NULL_TREE;
9560 /* Subroutine of fold_binary. Optimize complex multiplications of the
9561 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9562 argument EXPR represents the expression "z" of type TYPE. */
9564 static tree
9565 fold_mult_zconjz (tree type, tree expr)
9567 tree itype = TREE_TYPE (type);
9568 tree rpart, ipart, tem;
9570 if (TREE_CODE (expr) == COMPLEX_EXPR)
9572 rpart = TREE_OPERAND (expr, 0);
9573 ipart = TREE_OPERAND (expr, 1);
9575 else if (TREE_CODE (expr) == COMPLEX_CST)
9577 rpart = TREE_REALPART (expr);
9578 ipart = TREE_IMAGPART (expr);
9580 else
9582 expr = save_expr (expr);
9583 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9584 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9587 rpart = save_expr (rpart);
9588 ipart = save_expr (ipart);
9589 tem = fold_build2 (PLUS_EXPR, itype,
9590 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9591 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9592 return fold_build2 (COMPLEX_EXPR, type, tem,
9593 fold_convert (itype, integer_zero_node));
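   /* For z = a + b*i this yields __complex__ (a*a + b*b, 0): with
      conj(z) = a - b*i the imaginary parts cancel, so only the sum of
      squares remains in the real part.  */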
9597 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9598 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9599 guarantees that P and N have the same least significant log2(M) bits.
9600 N is not otherwise constrained. In particular, N is not normalized to
9601 0 <= N < M as is common. In general, the precise value of P is unknown.
9602 M is chosen as large as possible such that constant N can be determined.
9604 Returns M and sets *RESIDUE to N.
9606 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9607 account. This is not always possible due to PR 35705.
9610 static unsigned HOST_WIDE_INT
9611 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9612 bool allow_func_align)
9614 enum tree_code code;
9616 *residue = 0;
9618 code = TREE_CODE (expr);
9619 if (code == ADDR_EXPR)
9621 expr = TREE_OPERAND (expr, 0);
9622 if (handled_component_p (expr))
9624 HOST_WIDE_INT bitsize, bitpos;
9625 tree offset;
9626 enum machine_mode mode;
9627 int unsignedp, volatilep;
9629 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9630 &mode, &unsignedp, &volatilep, false);
9631 *residue = bitpos / BITS_PER_UNIT;
9632 if (offset)
9634 if (TREE_CODE (offset) == INTEGER_CST)
9635 *residue += TREE_INT_CST_LOW (offset);
9636 else
9637 /* We don't handle more complicated offset expressions. */
9638 return 1;
9642 if (DECL_P (expr)
9643 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9644 return DECL_ALIGN_UNIT (expr);
9646 else if (code == POINTER_PLUS_EXPR)
9648 tree op0, op1;
9649 unsigned HOST_WIDE_INT modulus;
9650 enum tree_code inner_code;
9652 op0 = TREE_OPERAND (expr, 0);
9653 STRIP_NOPS (op0);
9654 modulus = get_pointer_modulus_and_residue (op0, residue,
9655 allow_func_align);
9657 op1 = TREE_OPERAND (expr, 1);
9658 STRIP_NOPS (op1);
9659 inner_code = TREE_CODE (op1);
9660 if (inner_code == INTEGER_CST)
9662 *residue += TREE_INT_CST_LOW (op1);
9663 return modulus;
9665 else if (inner_code == MULT_EXPR)
9667 op1 = TREE_OPERAND (op1, 1);
9668 if (TREE_CODE (op1) == INTEGER_CST)
9670 unsigned HOST_WIDE_INT align;
9672 /* Compute the greatest power-of-2 divisor of op1. */
9673 align = TREE_INT_CST_LOW (op1);
9674 align &= -align;
9676 /* If align is non-zero and less than *modulus, replace
9677            *modulus with align.  If align is 0, then either op1 is 0
9678 or the greatest power-of-2 divisor of op1 doesn't fit in an
9679 unsigned HOST_WIDE_INT. In either case, no additional
9680 constraint is imposed. */
9681 if (align)
9682 modulus = MIN (modulus, align);
9684 return modulus;
9689 /* If we get here, we were unable to determine anything useful about the
9690 expression. */
9691 return 1;
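   /* As an illustration, for "&s.f" where s has 8-byte alignment and f
      sits at byte offset 4, the ADDR_EXPR case returns modulus 8 and
      sets *residue to 4: the pointer value is congruent to 4 mod 8.  */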
9695 /* Fold a binary expression of code CODE and type TYPE with operands
9696 OP0 and OP1. Return the folded expression if folding is
9697 successful. Otherwise, return NULL_TREE. */
9699 tree
9700 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9702 enum tree_code_class kind = TREE_CODE_CLASS (code);
9703 tree arg0, arg1, tem;
9704 tree t1 = NULL_TREE;
9705 bool strict_overflow_p;
9707 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9708 && TREE_CODE_LENGTH (code) == 2
9709 && op0 != NULL_TREE
9710 && op1 != NULL_TREE);
9712 arg0 = op0;
9713 arg1 = op1;
9715 /* Strip any conversions that don't change the mode. This is
9716 safe for every expression, except for a comparison expression
9717 because its signedness is derived from its operands. So, in
9718 the latter case, only strip conversions that don't change the
9719      signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
9720      arguments preserved.
9722 Note that this is done as an internal manipulation within the
9723 constant folder, in order to find the simplest representation
9724 of the arguments so that their form can be studied. In any
9725      case, the appropriate type conversions should be put back in
9726 the tree that will get out of the constant folder. */
9728 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9730 STRIP_SIGN_NOPS (arg0);
9731 STRIP_SIGN_NOPS (arg1);
9733 else
9735 STRIP_NOPS (arg0);
9736 STRIP_NOPS (arg1);
9739 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9740 constant but we can't do arithmetic on them. */
9741 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9742 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9743 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9744 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9745 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9746 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9748 if (kind == tcc_binary)
9750 /* Make sure type and arg0 have the same saturating flag. */
9751 gcc_assert (TYPE_SATURATING (type)
9752 == TYPE_SATURATING (TREE_TYPE (arg0)));
9753 tem = const_binop (code, arg0, arg1, 0);
9755 else if (kind == tcc_comparison)
9756 tem = fold_relational_const (code, type, arg0, arg1);
9757 else
9758 tem = NULL_TREE;
9760 if (tem != NULL_TREE)
9762 if (TREE_TYPE (tem) != type)
9763 tem = fold_convert (type, tem);
9764 return tem;
9768 /* If this is a commutative operation, and ARG0 is a constant, move it
9769 to ARG1 to reduce the number of tests below. */
9770 if (commutative_tree_code (code)
9771 && tree_swap_operands_p (arg0, arg1, true))
9772 return fold_build2 (code, type, op1, op0);
9774 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9776 First check for cases where an arithmetic operation is applied to a
9777 compound, conditional, or comparison operation. Push the arithmetic
9778 operation inside the compound or conditional to see if any folding
9779 can then be done. Convert comparison to conditional for this purpose.
9780      This also optimizes non-constant cases that used to be done in
9781 expand_expr.
9783 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9784 one of the operands is a comparison and the other is a comparison, a
9785 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9786 code below would make the expression more complex. Change it to a
9787 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9788 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9790 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9791 || code == EQ_EXPR || code == NE_EXPR)
9792 && ((truth_value_p (TREE_CODE (arg0))
9793 && (truth_value_p (TREE_CODE (arg1))
9794 || (TREE_CODE (arg1) == BIT_AND_EXPR
9795 && integer_onep (TREE_OPERAND (arg1, 1)))))
9796 || (truth_value_p (TREE_CODE (arg1))
9797 && (truth_value_p (TREE_CODE (arg0))
9798 || (TREE_CODE (arg0) == BIT_AND_EXPR
9799 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9801 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9802 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9803 : TRUTH_XOR_EXPR,
9804 boolean_type_node,
9805 fold_convert (boolean_type_node, arg0),
9806 fold_convert (boolean_type_node, arg1));
9808 if (code == EQ_EXPR)
9809 tem = invert_truthvalue (tem);
9811 return fold_convert (type, tem);
9814 if (TREE_CODE_CLASS (code) == tcc_binary
9815 || TREE_CODE_CLASS (code) == tcc_comparison)
9817 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9818 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9819 fold_build2 (code, type,
9820 fold_convert (TREE_TYPE (op0),
9821 TREE_OPERAND (arg0, 1)),
9822 op1));
9823 if (TREE_CODE (arg1) == COMPOUND_EXPR
9824 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9825 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9826 fold_build2 (code, type, op0,
9827 fold_convert (TREE_TYPE (op1),
9828 TREE_OPERAND (arg1, 1))));
9830 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9832 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9833 arg0, arg1,
9834 /*cond_first_p=*/1);
9835 if (tem != NULL_TREE)
9836 return tem;
9839 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9841 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9842 arg1, arg0,
9843 /*cond_first_p=*/0);
9844 if (tem != NULL_TREE)
9845 return tem;
9849 switch (code)
9851 case POINTER_PLUS_EXPR:
9852 /* 0 +p index -> (type)index */
9853 if (integer_zerop (arg0))
9854 return non_lvalue (fold_convert (type, arg1));
9856 /* PTR +p 0 -> PTR */
9857 if (integer_zerop (arg1))
9858 return non_lvalue (fold_convert (type, arg0));
9860 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9861 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9862 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9863 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9864 fold_convert (sizetype, arg1),
9865 fold_convert (sizetype, arg0)));
9867 /* index +p PTR -> PTR +p index */
9868 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9869 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9870 return fold_build2 (POINTER_PLUS_EXPR, type,
9871 fold_convert (type, arg1),
9872 fold_convert (sizetype, arg0));
9874 /* (PTR +p B) +p A -> PTR +p (B + A) */
9875 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9877 tree inner;
9878 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9879 tree arg00 = TREE_OPERAND (arg0, 0);
9880 inner = fold_build2 (PLUS_EXPR, sizetype,
9881 arg01, fold_convert (sizetype, arg1));
9882 return fold_convert (type,
9883 fold_build2 (POINTER_PLUS_EXPR,
9884 TREE_TYPE (arg00), arg00, inner));
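       /* E.g. "(p p+ 4) p+ 8" becomes "p p+ 12" once both offsets have
          been combined in sizetype.  */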
9887 /* PTR_CST +p CST -> CST1 */
9888 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9889 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9891       /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9892          of the array.  The loop optimizer sometimes produces this kind of
9893          expression.  */
9894 if (TREE_CODE (arg0) == ADDR_EXPR)
9896 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9897 if (tem)
9898 return fold_convert (type, tem);
9901 return NULL_TREE;
9903 case PLUS_EXPR:
9904 /* A + (-B) -> A - B */
9905 if (TREE_CODE (arg1) == NEGATE_EXPR)
9906 return fold_build2 (MINUS_EXPR, type,
9907 fold_convert (type, arg0),
9908 fold_convert (type, TREE_OPERAND (arg1, 0)));
9909 /* (-A) + B -> B - A */
9910 if (TREE_CODE (arg0) == NEGATE_EXPR
9911 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9912 return fold_build2 (MINUS_EXPR, type,
9913 fold_convert (type, arg1),
9914 fold_convert (type, TREE_OPERAND (arg0, 0)));
9916 if (INTEGRAL_TYPE_P (type))
9918 /* Convert ~A + 1 to -A. */
9919 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9920 && integer_onep (arg1))
9921 return fold_build1 (NEGATE_EXPR, type,
9922 fold_convert (type, TREE_OPERAND (arg0, 0)));
9924 /* ~X + X is -1. */
9925 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9926 && !TYPE_OVERFLOW_TRAPS (type))
9928 tree tem = TREE_OPERAND (arg0, 0);
9930 STRIP_NOPS (tem);
9931 if (operand_equal_p (tem, arg1, 0))
9933 t1 = build_int_cst_type (type, -1);
9934 return omit_one_operand (type, t1, arg1);
9938 /* X + ~X is -1. */
9939 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9940 && !TYPE_OVERFLOW_TRAPS (type))
9942 tree tem = TREE_OPERAND (arg1, 0);
9944 STRIP_NOPS (tem);
9945 if (operand_equal_p (arg0, tem, 0))
9947 t1 = build_int_cst_type (type, -1);
9948 return omit_one_operand (type, t1, arg0);
9952 /* X + (X / CST) * -CST is X % CST. */
9953 if (TREE_CODE (arg1) == MULT_EXPR
9954 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9955 && operand_equal_p (arg0,
9956 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9958 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9959 tree cst1 = TREE_OPERAND (arg1, 1);
9960 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9961 if (sum && integer_zerop (sum))
9962 return fold_convert (type,
9963 fold_build2 (TRUNC_MOD_EXPR,
9964 TREE_TYPE (arg0), arg0, cst0));
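           /* E.g. "x + (x / 16) * -16" folds to "x % 16", since the two
              constants sum to zero.  */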
9968 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9969 same or one. Make sure type is not saturating.
9970 fold_plusminus_mult_expr will re-associate. */
9971 if ((TREE_CODE (arg0) == MULT_EXPR
9972 || TREE_CODE (arg1) == MULT_EXPR)
9973 && !TYPE_SATURATING (type)
9974 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9976 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9977 if (tem)
9978 return tem;
9981 if (! FLOAT_TYPE_P (type))
9983 if (integer_zerop (arg1))
9984 return non_lvalue (fold_convert (type, arg0));
9986 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9987 with a constant, and the two constants have no bits in common,
9988 we should treat this as a BIT_IOR_EXPR since this may produce more
9989 simplifications. */
9990 if (TREE_CODE (arg0) == BIT_AND_EXPR
9991 && TREE_CODE (arg1) == BIT_AND_EXPR
9992 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9993 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9994 && integer_zerop (const_binop (BIT_AND_EXPR,
9995 TREE_OPERAND (arg0, 1),
9996 TREE_OPERAND (arg1, 1), 0)))
9998 code = BIT_IOR_EXPR;
9999 goto bit_ior;
10002 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10003 (plus (plus (mult) (mult)) (foo)) so that we can
10004 take advantage of the factoring cases below. */
10005 if (((TREE_CODE (arg0) == PLUS_EXPR
10006 || TREE_CODE (arg0) == MINUS_EXPR)
10007 && TREE_CODE (arg1) == MULT_EXPR)
10008 || ((TREE_CODE (arg1) == PLUS_EXPR
10009 || TREE_CODE (arg1) == MINUS_EXPR)
10010 && TREE_CODE (arg0) == MULT_EXPR))
10012 tree parg0, parg1, parg, marg;
10013 enum tree_code pcode;
10015 if (TREE_CODE (arg1) == MULT_EXPR)
10016 parg = arg0, marg = arg1;
10017 else
10018 parg = arg1, marg = arg0;
10019 pcode = TREE_CODE (parg);
10020 parg0 = TREE_OPERAND (parg, 0);
10021 parg1 = TREE_OPERAND (parg, 1);
10022 STRIP_NOPS (parg0);
10023 STRIP_NOPS (parg1);
10025 if (TREE_CODE (parg0) == MULT_EXPR
10026 && TREE_CODE (parg1) != MULT_EXPR)
10027 return fold_build2 (pcode, type,
10028 fold_build2 (PLUS_EXPR, type,
10029 fold_convert (type, parg0),
10030 fold_convert (type, marg)),
10031 fold_convert (type, parg1));
10032 if (TREE_CODE (parg0) != MULT_EXPR
10033 && TREE_CODE (parg1) == MULT_EXPR)
10034 return fold_build2 (PLUS_EXPR, type,
10035 fold_convert (type, parg0),
10036 fold_build2 (pcode, type,
10037 fold_convert (type, marg),
10038 fold_convert (type,
10039 parg1)));
10042 else
10044 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10045 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10046 return non_lvalue (fold_convert (type, arg0));
10048 /* Likewise if the operands are reversed. */
10049 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10050 return non_lvalue (fold_convert (type, arg1));
10052 /* Convert X + -C into X - C. */
10053 if (TREE_CODE (arg1) == REAL_CST
10054 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10056 tem = fold_negate_const (arg1, type);
10057 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10058 return fold_build2 (MINUS_EXPR, type,
10059 fold_convert (type, arg0),
10060 fold_convert (type, tem));
10063 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10064 to __complex__ ( x, y ). This is not the same for SNaNs or
10065 if signed zeros are involved. */
10066 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10067 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10068 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10070 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10071 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10072 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10073 bool arg0rz = false, arg0iz = false;
10074 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10075 || (arg0i && (arg0iz = real_zerop (arg0i))))
10077 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10078 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10079 if (arg0rz && arg1i && real_zerop (arg1i))
10081 tree rp = arg1r ? arg1r
10082 : build1 (REALPART_EXPR, rtype, arg1);
10083 tree ip = arg0i ? arg0i
10084 : build1 (IMAGPART_EXPR, rtype, arg0);
10085 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10087 else if (arg0iz && arg1r && real_zerop (arg1r))
10089 tree rp = arg0r ? arg0r
10090 : build1 (REALPART_EXPR, rtype, arg0);
10091 tree ip = arg1i ? arg1i
10092 : build1 (IMAGPART_EXPR, rtype, arg1);
10093 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10098 if (flag_unsafe_math_optimizations
10099 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10100 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10101 && (tem = distribute_real_division (code, type, arg0, arg1)))
10102 return tem;
10104 /* Convert x+x into x*2.0. */
10105 if (operand_equal_p (arg0, arg1, 0)
10106 && SCALAR_FLOAT_TYPE_P (type))
10107 return fold_build2 (MULT_EXPR, type, arg0,
10108 build_real (type, dconst2));
10110 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10111 We associate floats only if the user has specified
10112 -fassociative-math. */
10113 if (flag_associative_math
10114 && TREE_CODE (arg1) == PLUS_EXPR
10115 && TREE_CODE (arg0) != MULT_EXPR)
10117 tree tree10 = TREE_OPERAND (arg1, 0);
10118 tree tree11 = TREE_OPERAND (arg1, 1);
10119 if (TREE_CODE (tree11) == MULT_EXPR
10120 && TREE_CODE (tree10) == MULT_EXPR)
10122 tree tree0;
10123 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10124 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10127 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10128 We associate floats only if the user has specified
10129 -fassociative-math. */
10130 if (flag_associative_math
10131 && TREE_CODE (arg0) == PLUS_EXPR
10132 && TREE_CODE (arg1) != MULT_EXPR)
10134 tree tree00 = TREE_OPERAND (arg0, 0);
10135 tree tree01 = TREE_OPERAND (arg0, 1);
10136 if (TREE_CODE (tree01) == MULT_EXPR
10137 && TREE_CODE (tree00) == MULT_EXPR)
10139 tree tree0;
10140 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10141 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10146 bit_rotate:
10147 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10148 is a rotate of A by C1 bits. */
10149 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10150 is a rotate of A by B bits. */
10152 enum tree_code code0, code1;
10153 tree rtype;
10154 code0 = TREE_CODE (arg0);
10155 code1 = TREE_CODE (arg1);
10156 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10157 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10158 && operand_equal_p (TREE_OPERAND (arg0, 0),
10159 TREE_OPERAND (arg1, 0), 0)
10160 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10161 TYPE_UNSIGNED (rtype))
10162 /* Only create rotates in complete modes. Other cases are not
10163 expanded properly. */
10164 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10166 tree tree01, tree11;
10167 enum tree_code code01, code11;
10169 tree01 = TREE_OPERAND (arg0, 1);
10170 tree11 = TREE_OPERAND (arg1, 1);
10171 STRIP_NOPS (tree01);
10172 STRIP_NOPS (tree11);
10173 code01 = TREE_CODE (tree01);
10174 code11 = TREE_CODE (tree11);
10175 if (code01 == INTEGER_CST
10176 && code11 == INTEGER_CST
10177 && TREE_INT_CST_HIGH (tree01) == 0
10178 && TREE_INT_CST_HIGH (tree11) == 0
10179 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10180 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10181 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
10182 code0 == LSHIFT_EXPR ? tree01 : tree11);
10183 else if (code11 == MINUS_EXPR)
10185 tree tree110, tree111;
10186 tree110 = TREE_OPERAND (tree11, 0);
10187 tree111 = TREE_OPERAND (tree11, 1);
10188 STRIP_NOPS (tree110);
10189 STRIP_NOPS (tree111);
10190 if (TREE_CODE (tree110) == INTEGER_CST
10191 && 0 == compare_tree_int (tree110,
10192 TYPE_PRECISION
10193 (TREE_TYPE (TREE_OPERAND
10194 (arg0, 0))))
10195 && operand_equal_p (tree01, tree111, 0))
10196 return build2 ((code0 == LSHIFT_EXPR
10197 ? LROTATE_EXPR
10198 : RROTATE_EXPR),
10199 type, TREE_OPERAND (arg0, 0), tree01);
10201 else if (code01 == MINUS_EXPR)
10203 tree tree010, tree011;
10204 tree010 = TREE_OPERAND (tree01, 0);
10205 tree011 = TREE_OPERAND (tree01, 1);
10206 STRIP_NOPS (tree010);
10207 STRIP_NOPS (tree011);
10208 if (TREE_CODE (tree010) == INTEGER_CST
10209 && 0 == compare_tree_int (tree010,
10210 TYPE_PRECISION
10211 (TREE_TYPE (TREE_OPERAND
10212 (arg0, 0))))
10213 && operand_equal_p (tree11, tree011, 0))
10214 return build2 ((code0 != LSHIFT_EXPR
10215 ? LROTATE_EXPR
10216 : RROTATE_EXPR),
10217 type, TREE_OPERAND (arg0, 0), tree11);
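         /* E.g. assuming a 32-bit unsigned int x, "(x << 3) + (x >> 29)"
            is recognized as a rotate left by 3, and
            "(x << b) + (x >> (32 - b))" as a rotate left by b.  */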
10222 associate:
10223 /* In most languages, can't associate operations on floats through
10224 parentheses. Rather than remember where the parentheses were, we
10225 don't associate floats at all, unless the user has specified
10226 -fassociative-math.
10227 And, we need to make sure type is not saturating. */
10229 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10230 && !TYPE_SATURATING (type))
10232 tree var0, con0, lit0, minus_lit0;
10233 tree var1, con1, lit1, minus_lit1;
10234 bool ok = true;
10236 /* Split both trees into variables, constants, and literals. Then
10237 associate each group together, the constants with literals,
10238 then the result with variables. This increases the chances of
10239 literals being recombined later and of generating relocatable
10240 expressions for the sum of a constant and literal. */
10241 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10242 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10243 code == MINUS_EXPR);
10245 /* With undefined overflow we can only associate constants
10246 with one variable. */
10247 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10248 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10249 && var0 && var1)
10251 tree tmp0 = var0;
10252 tree tmp1 = var1;
10254 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10255 tmp0 = TREE_OPERAND (tmp0, 0);
10256 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10257 tmp1 = TREE_OPERAND (tmp1, 0);
10258 /* The only case we can still associate with two variables
10259 is if they are the same, modulo negation. */
10260 if (!operand_equal_p (tmp0, tmp1, 0))
10261 ok = false;
10264 /* Only do something if we found more than two objects. Otherwise,
10265 nothing has changed and we risk infinite recursion. */
10266 if (ok
10267 && (2 < ((var0 != 0) + (var1 != 0)
10268 + (con0 != 0) + (con1 != 0)
10269 + (lit0 != 0) + (lit1 != 0)
10270 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10272 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10273 if (code == MINUS_EXPR)
10274 code = PLUS_EXPR;
10276 var0 = associate_trees (var0, var1, code, type);
10277 con0 = associate_trees (con0, con1, code, type);
10278 lit0 = associate_trees (lit0, lit1, code, type);
10279 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10281 /* Preserve the MINUS_EXPR if the negative part of the literal is
10282 greater than the positive part. Otherwise, the multiplicative
10283            folding code (i.e. extract_muldiv) may be fooled in case
10284 unsigned constants are subtracted, like in the following
10285 example: ((X*2 + 4) - 8U)/2. */
10286 if (minus_lit0 && lit0)
10288 if (TREE_CODE (lit0) == INTEGER_CST
10289 && TREE_CODE (minus_lit0) == INTEGER_CST
10290 && tree_int_cst_lt (lit0, minus_lit0))
10292 minus_lit0 = associate_trees (minus_lit0, lit0,
10293 MINUS_EXPR, type);
10294 lit0 = 0;
10296 else
10298 lit0 = associate_trees (lit0, minus_lit0,
10299 MINUS_EXPR, type);
10300 minus_lit0 = 0;
10303 if (minus_lit0)
10305 if (con0 == 0)
10306 return fold_convert (type,
10307 associate_trees (var0, minus_lit0,
10308 MINUS_EXPR, type));
10309 else
10311 con0 = associate_trees (con0, minus_lit0,
10312 MINUS_EXPR, type);
10313 return fold_convert (type,
10314 associate_trees (var0, con0,
10315 PLUS_EXPR, type));
10319 con0 = associate_trees (con0, lit0, code, type);
10320 return fold_convert (type, associate_trees (var0, con0,
10321 code, type));
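           /* E.g. "(x + 1) + (y + 2)" splits into variables x, y and
              literals 1, 2, and is recombined as "(x + y) + 3".  */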
10325 return NULL_TREE;
10327 case MINUS_EXPR:
10328 /* Pointer simplifications for subtraction, simple reassociations. */
10329 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10331 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10332 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10333 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10335 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10336 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10337 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10338 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10339 return fold_build2 (PLUS_EXPR, type,
10340 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10341 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10343 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10344 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10346 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10347 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10348 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10349 if (tmp)
10350 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10353 /* A - (-B) -> A + B */
10354 if (TREE_CODE (arg1) == NEGATE_EXPR)
10355 return fold_build2 (PLUS_EXPR, type, op0,
10356 fold_convert (type, TREE_OPERAND (arg1, 0)));
10357 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10358 if (TREE_CODE (arg0) == NEGATE_EXPR
10359 && (FLOAT_TYPE_P (type)
10360 || INTEGRAL_TYPE_P (type))
10361 && negate_expr_p (arg1)
10362 && reorder_operands_p (arg0, arg1))
10363 return fold_build2 (MINUS_EXPR, type,
10364 fold_convert (type, negate_expr (arg1)),
10365 fold_convert (type, TREE_OPERAND (arg0, 0)));
10366 /* Convert -A - 1 to ~A. */
10367 if (INTEGRAL_TYPE_P (type)
10368 && TREE_CODE (arg0) == NEGATE_EXPR
10369 && integer_onep (arg1)
10370 && !TYPE_OVERFLOW_TRAPS (type))
10371 return fold_build1 (BIT_NOT_EXPR, type,
10372 fold_convert (type, TREE_OPERAND (arg0, 0)));
10374 /* Convert -1 - A to ~A. */
10375 if (INTEGRAL_TYPE_P (type)
10376 && integer_all_onesp (arg0))
10377 return fold_build1 (BIT_NOT_EXPR, type, op1);
10380 /* X - (X / CST) * CST is X % CST. */
10381 if (INTEGRAL_TYPE_P (type)
10382 && TREE_CODE (arg1) == MULT_EXPR
10383 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10384 && operand_equal_p (arg0,
10385 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10386 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10387 TREE_OPERAND (arg1, 1), 0))
10388 return fold_convert (type,
10389 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10390 arg0, TREE_OPERAND (arg1, 1)));
10392 if (! FLOAT_TYPE_P (type))
10394 if (integer_zerop (arg0))
10395 return negate_expr (fold_convert (type, arg1));
10396 if (integer_zerop (arg1))
10397 return non_lvalue (fold_convert (type, arg0));
10399 /* Fold A - (A & B) into ~B & A. */
10400 if (!TREE_SIDE_EFFECTS (arg0)
10401 && TREE_CODE (arg1) == BIT_AND_EXPR)
10403 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10405 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10406 return fold_build2 (BIT_AND_EXPR, type,
10407 fold_build1 (BIT_NOT_EXPR, type, arg10),
10408 fold_convert (type, arg0));
10410 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10412 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10413 return fold_build2 (BIT_AND_EXPR, type,
10414 fold_build1 (BIT_NOT_EXPR, type, arg11),
10415 fold_convert (type, arg0));
10419 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10420 any power of 2 minus 1. */
10421 if (TREE_CODE (arg0) == BIT_AND_EXPR
10422 && TREE_CODE (arg1) == BIT_AND_EXPR
10423 && operand_equal_p (TREE_OPERAND (arg0, 0),
10424 TREE_OPERAND (arg1, 0), 0))
10426 tree mask0 = TREE_OPERAND (arg0, 1);
10427 tree mask1 = TREE_OPERAND (arg1, 1);
10428 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10430 if (operand_equal_p (tem, mask1, 0))
10432 tem = fold_build2 (BIT_XOR_EXPR, type,
10433 TREE_OPERAND (arg0, 0), mask1);
10434 return fold_build2 (MINUS_EXPR, type, tem, mask1);
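           /* E.g. with B == 7, a power of 2 minus 1, "(a & ~7) - (a & 7)"
              becomes "(a ^ 7) - 7".  */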
10439 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10440 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10441 return non_lvalue (fold_convert (type, arg0));
10443 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10444 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10445 (-ARG1 + ARG0) reduces to -ARG1. */
10446 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10447 return negate_expr (fold_convert (type, arg1));
10449 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10450 __complex__ ( x, -y ). This is not the same for SNaNs or if
10451 signed zeros are involved. */
10452 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10453 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10454 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10456 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10457 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10458 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10459 bool arg0rz = false, arg0iz = false;
10460 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10461 || (arg0i && (arg0iz = real_zerop (arg0i))))
10463 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10464 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10465 if (arg0rz && arg1i && real_zerop (arg1i))
10467 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10468 arg1r ? arg1r
10469 : build1 (REALPART_EXPR, rtype, arg1));
10470 tree ip = arg0i ? arg0i
10471 : build1 (IMAGPART_EXPR, rtype, arg0);
10472 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10474 else if (arg0iz && arg1r && real_zerop (arg1r))
10476 tree rp = arg0r ? arg0r
10477 : build1 (REALPART_EXPR, rtype, arg0);
10478 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10479 arg1i ? arg1i
10480 : build1 (IMAGPART_EXPR, rtype, arg1));
10481 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10486 /* Fold &x - &x. This can happen from &x.foo - &x.
10487 This is unsafe for certain floats even in non-IEEE formats.
10488 In IEEE, it is unsafe because it does wrong for NaNs.
10489 Also note that operand_equal_p is always false if an operand
10490 is volatile. */
10492 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10493 && operand_equal_p (arg0, arg1, 0))
10494 return fold_convert (type, integer_zero_node);
10496 /* A - B -> A + (-B) if B is easily negatable. */
10497 if (negate_expr_p (arg1)
10498 && ((FLOAT_TYPE_P (type)
10499 /* Avoid this transformation if B is a positive REAL_CST. */
10500 && (TREE_CODE (arg1) != REAL_CST
10501 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10502 || INTEGRAL_TYPE_P (type)))
10503 return fold_build2 (PLUS_EXPR, type,
10504 fold_convert (type, arg0),
10505 fold_convert (type, negate_expr (arg1)));
10507 /* Try folding difference of addresses. */
10509 HOST_WIDE_INT diff;
10511 if ((TREE_CODE (arg0) == ADDR_EXPR
10512 || TREE_CODE (arg1) == ADDR_EXPR)
10513 && ptr_difference_const (arg0, arg1, &diff))
10514 return build_int_cst_type (type, diff);
10517 /* Fold &a[i] - &a[j] to i-j. */
10518 if (TREE_CODE (arg0) == ADDR_EXPR
10519 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10520 && TREE_CODE (arg1) == ADDR_EXPR
10521 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10523 tree aref0 = TREE_OPERAND (arg0, 0);
10524 tree aref1 = TREE_OPERAND (arg1, 0);
10525 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10526 TREE_OPERAND (aref1, 0), 0))
10528 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10529 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10530 tree esz = array_ref_element_size (aref0);
10531 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10532 return fold_build2 (MULT_EXPR, type, diff,
10533 fold_convert (type, esz));
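           /* E.g. for int a[], "&a[i] - &a[j]" folds to "(i - j) * 4",
              assuming a 4-byte element size.  */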
10538 if (flag_unsafe_math_optimizations
10539 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10540 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10541 && (tem = distribute_real_division (code, type, arg0, arg1)))
10542 return tem;
10544 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10545 same or one. Make sure type is not saturating.
10546 fold_plusminus_mult_expr will re-associate. */
10547 if ((TREE_CODE (arg0) == MULT_EXPR
10548 || TREE_CODE (arg1) == MULT_EXPR)
10549 && !TYPE_SATURATING (type)
10550 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10552 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10553 if (tem)
10554 return tem;
10557 goto associate;
10559 case MULT_EXPR:
10560 /* (-A) * (-B) -> A * B */
10561 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10562 return fold_build2 (MULT_EXPR, type,
10563 fold_convert (type, TREE_OPERAND (arg0, 0)),
10564 fold_convert (type, negate_expr (arg1)));
10565 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10566 return fold_build2 (MULT_EXPR, type,
10567 fold_convert (type, negate_expr (arg0)),
10568 fold_convert (type, TREE_OPERAND (arg1, 0)));
10570 if (! FLOAT_TYPE_P (type))
10572 if (integer_zerop (arg1))
10573 return omit_one_operand (type, arg1, arg0);
10574 if (integer_onep (arg1))
10575 return non_lvalue (fold_convert (type, arg0));
10576 /* Transform x * -1 into -x. Make sure to do the negation
10577 on the original operand with conversions not stripped
10578 because we can only strip non-sign-changing conversions. */
10579 if (integer_all_onesp (arg1))
10580 return fold_convert (type, negate_expr (op0));
10581 /* Transform x * -C into -x * C if x is easily negatable. */
10582 if (TREE_CODE (arg1) == INTEGER_CST
10583 && tree_int_cst_sgn (arg1) == -1
10584 && negate_expr_p (arg0)
10585 && (tem = negate_expr (arg1)) != arg1
10586 && !TREE_OVERFLOW (tem))
10587 return fold_build2 (MULT_EXPR, type,
10588 fold_convert (type, negate_expr (arg0)), tem);
10590 /* (a * (1 << b)) is (a << b) */
10591 if (TREE_CODE (arg1) == LSHIFT_EXPR
10592 && integer_onep (TREE_OPERAND (arg1, 0)))
10593 return fold_build2 (LSHIFT_EXPR, type, op0,
10594 TREE_OPERAND (arg1, 1));
10595 if (TREE_CODE (arg0) == LSHIFT_EXPR
10596 && integer_onep (TREE_OPERAND (arg0, 0)))
10597 return fold_build2 (LSHIFT_EXPR, type, op1,
10598 TREE_OPERAND (arg0, 1));
10600 /* (A + A) * C -> A * 2 * C */
10601 if (TREE_CODE (arg0) == PLUS_EXPR
10602 && TREE_CODE (arg1) == INTEGER_CST
10603 && operand_equal_p (TREE_OPERAND (arg0, 0),
10604 TREE_OPERAND (arg0, 1), 0))
10605 return fold_build2 (MULT_EXPR, type,
10606 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10607 TREE_OPERAND (arg0, 1)),
10608 fold_build2 (MULT_EXPR, type,
10609                                              build_int_cst (type, 2), arg1));
10611 strict_overflow_p = false;
10612 if (TREE_CODE (arg1) == INTEGER_CST
10613 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10614 &strict_overflow_p)))
10616 if (strict_overflow_p)
10617 fold_overflow_warning (("assuming signed overflow does not "
10618 "occur when simplifying "
10619 "multiplication"),
10620 WARN_STRICT_OVERFLOW_MISC);
10621 return fold_convert (type, tem);
10624 /* Optimize z * conj(z) for integer complex numbers. */
10625 if (TREE_CODE (arg0) == CONJ_EXPR
10626 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10627 return fold_mult_zconjz (type, arg1);
10628 if (TREE_CODE (arg1) == CONJ_EXPR
10629 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10630 return fold_mult_zconjz (type, arg0);
10632 else
10634 /* Maybe fold x * 0 to 0. The expressions aren't the same
10635 when x is NaN, since x * 0 is also NaN. Nor are they the
10636 same in modes with signed zeros, since multiplying a
10637 negative value by 0 gives -0, not +0. */
10638 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10639 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10640 && real_zerop (arg1))
10641 return omit_one_operand (type, arg1, arg0);
10642 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10643 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10644 && real_onep (arg1))
10645 return non_lvalue (fold_convert (type, arg0));
10647 /* Transform x * -1.0 into -x. */
10648 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10649 && real_minus_onep (arg1))
10650 return fold_convert (type, negate_expr (arg0));
10652 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10653            the result for floating point types due to rounding, so it is applied
10654            only if -fassociative-math was specified.  */
10655 if (flag_associative_math
10656 && TREE_CODE (arg0) == RDIV_EXPR
10657 && TREE_CODE (arg1) == REAL_CST
10658 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10660 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10661 arg1, 0);
10662 if (tem)
10663 return fold_build2 (RDIV_EXPR, type, tem,
10664 TREE_OPERAND (arg0, 1));
10667 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10668 if (operand_equal_p (arg0, arg1, 0))
10670 tree tem = fold_strip_sign_ops (arg0);
10671 if (tem != NULL_TREE)
10673 tem = fold_convert (type, tem);
10674 return fold_build2 (MULT_EXPR, type, tem, tem);
10678 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10679 This is not the same for NaNs or if signed zeros are
10680 involved. */
10681 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10682 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10683 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10684 && TREE_CODE (arg1) == COMPLEX_CST
10685 && real_zerop (TREE_REALPART (arg1)))
10687 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10688 if (real_onep (TREE_IMAGPART (arg1)))
10689 return fold_build2 (COMPLEX_EXPR, type,
10690 negate_expr (fold_build1 (IMAGPART_EXPR,
10691 rtype, arg0)),
10692 fold_build1 (REALPART_EXPR, rtype, arg0));
10693 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10694 return fold_build2 (COMPLEX_EXPR, type,
10695 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10696 negate_expr (fold_build1 (REALPART_EXPR,
10697 rtype, arg0)));
10700 /* Optimize z * conj(z) for floating point complex numbers.
10701 Guarded by flag_unsafe_math_optimizations as non-finite
10702 imaginary components don't produce scalar results. */
10703 if (flag_unsafe_math_optimizations
10704 && TREE_CODE (arg0) == CONJ_EXPR
10705 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10706 return fold_mult_zconjz (type, arg1);
10707 if (flag_unsafe_math_optimizations
10708 && TREE_CODE (arg1) == CONJ_EXPR
10709 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10710 return fold_mult_zconjz (type, arg0);
10712 if (flag_unsafe_math_optimizations)
10714 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10715 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10717 /* Optimizations of root(...)*root(...). */
10718 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10720 tree rootfn, arg;
10721 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10722 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10724 /* Optimize sqrt(x)*sqrt(x) as x. */
10725 if (BUILTIN_SQRT_P (fcode0)
10726 && operand_equal_p (arg00, arg10, 0)
10727 && ! HONOR_SNANS (TYPE_MODE (type)))
10728 return arg00;
10730 /* Optimize root(x)*root(y) as root(x*y). */
10731 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10732 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10733 return build_call_expr (rootfn, 1, arg);
10736 /* Optimize expN(x)*expN(y) as expN(x+y). */
10737 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10739 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10740 tree arg = fold_build2 (PLUS_EXPR, type,
10741 CALL_EXPR_ARG (arg0, 0),
10742 CALL_EXPR_ARG (arg1, 0));
10743 return build_call_expr (expfn, 1, arg);
10746 /* Optimizations of pow(...)*pow(...). */
10747 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10748 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10749 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10751 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10752 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10753 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10754 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10756 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10757 if (operand_equal_p (arg01, arg11, 0))
10759 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10760 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10761 return build_call_expr (powfn, 2, arg, arg01);
10764 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10765 if (operand_equal_p (arg00, arg10, 0))
10767 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10768 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10769 return build_call_expr (powfn, 2, arg00, arg);
10773 /* Optimize tan(x)*cos(x) as sin(x). */
10774 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10775 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10776 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10777 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10778 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10779 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10780 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10781 CALL_EXPR_ARG (arg1, 0), 0))
10783 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10785 if (sinfn != NULL_TREE)
10786 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10789 /* Optimize x*pow(x,c) as pow(x,c+1). */
10790 if (fcode1 == BUILT_IN_POW
10791 || fcode1 == BUILT_IN_POWF
10792 || fcode1 == BUILT_IN_POWL)
10794 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10795 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10796 if (TREE_CODE (arg11) == REAL_CST
10797 && !TREE_OVERFLOW (arg11)
10798 && operand_equal_p (arg0, arg10, 0))
10800 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10801 REAL_VALUE_TYPE c;
10802 tree arg;
10804 c = TREE_REAL_CST (arg11);
10805 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10806 arg = build_real (type, c);
10807 return build_call_expr (powfn, 2, arg0, arg);
10811 /* Optimize pow(x,c)*x as pow(x,c+1). */
10812 if (fcode0 == BUILT_IN_POW
10813 || fcode0 == BUILT_IN_POWF
10814 || fcode0 == BUILT_IN_POWL)
10816 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10817 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10818 if (TREE_CODE (arg01) == REAL_CST
10819 && !TREE_OVERFLOW (arg01)
10820 && operand_equal_p (arg1, arg00, 0))
10822 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10823 REAL_VALUE_TYPE c;
10824 tree arg;
10826 c = TREE_REAL_CST (arg01);
10827 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10828 arg = build_real (type, c);
10829 return build_call_expr (powfn, 2, arg1, arg);
10833 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10834 if (optimize_function_for_speed_p (cfun)
10835 && operand_equal_p (arg0, arg1, 0))
10837 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10839 if (powfn)
10841 tree arg = build_real (type, dconst2);
10842 return build_call_expr (powfn, 2, arg0, arg);
10847 goto associate;
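/* [Editorial note.]  The x*x fold above may look circular, since
   pow (x, 2.0) is expanded right back into x*x.  Canonicalizing to
   pow first lets the exponent-combining folds above see through the
   square, e.g. x*x * pow (x, 3.0) can become pow (x, 5.0) before
   expansion reopens the multiplication.  */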
10849 case BIT_IOR_EXPR:
10850 bit_ior:
10851 if (integer_all_onesp (arg1))
10852 return omit_one_operand (type, arg1, arg0);
10853 if (integer_zerop (arg1))
10854 return non_lvalue (fold_convert (type, arg0));
10855 if (operand_equal_p (arg0, arg1, 0))
10856 return non_lvalue (fold_convert (type, arg0));
10858 /* ~X | X is -1. */
10859 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10860 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10862 t1 = fold_convert (type, integer_zero_node);
10863 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10864 return omit_one_operand (type, t1, arg1);
10867 /* X | ~X is -1. */
10868 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10869 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10871 t1 = fold_convert (type, integer_zero_node);
10872 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10873 return omit_one_operand (type, t1, arg0);
10876 /* Canonicalize (X & C1) | C2. */
10877 if (TREE_CODE (arg0) == BIT_AND_EXPR
10878 && TREE_CODE (arg1) == INTEGER_CST
10879 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10881 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10882 int width = TYPE_PRECISION (type), w;
10883 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10884 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10885 hi2 = TREE_INT_CST_HIGH (arg1);
10886 lo2 = TREE_INT_CST_LOW (arg1);
10888 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10889 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10890 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10892 if (width > HOST_BITS_PER_WIDE_INT)
10894 mhi = (unsigned HOST_WIDE_INT) -1
10895 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10896 mlo = -1;
10898 else
10900 mhi = 0;
10901 mlo = (unsigned HOST_WIDE_INT) -1
10902 >> (HOST_BITS_PER_WIDE_INT - width);
10905 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10906 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10907 return fold_build2 (BIT_IOR_EXPR, type,
10908 TREE_OPERAND (arg0, 0), arg1);
10910 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10911 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10912 mode which allows further optimizations. */
10913 hi1 &= mhi;
10914 lo1 &= mlo;
10915 hi2 &= mhi;
10916 lo2 &= mlo;
10917 hi3 = hi1 & ~hi2;
10918 lo3 = lo1 & ~lo2;
10919 for (w = BITS_PER_UNIT;
10920 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10921 w <<= 1)
10923 unsigned HOST_WIDE_INT mask
10924 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10925 if (((lo1 | lo2) & mask) == mask
10926 && (lo1 & ~mask) == 0 && hi1 == 0)
10928 hi3 = 0;
10929 lo3 = mask;
10930 break;
10933 if (hi3 != hi1 || lo3 != lo1)
10934 return fold_build2 (BIT_IOR_EXPR, type,
10935 fold_build2 (BIT_AND_EXPR, type,
10936 TREE_OPERAND (arg0, 0),
10937 build_int_cst_wide (type,
10938 lo3, hi3)),
10939 arg1);
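/* [Editorial illustration -- not part of fold-const.c.]  Effects of
   the (X & C1) | C2 canonicalization above on a 32-bit int x:

       (x & 0x3f)       | 0x0f  ==>  (x & 0x30) | 0x0f    C1 &= ~C2
       (x & 0xfffffff0) | 0x0f  ==>  x | 0x0f             C1 | C2 == ~0
       (x & 0xf0)       | 0x0f  ==>  (x & 0xff) | 0x0f    widen C1

   The third case is the BITS_PER_UNIT loop's doing: when C1 | C2
   covers a whole byte/halfword-style mask, keeping C1 as that mask
   (rather than the minimal 0xf0) tends to match zero-extension
   patterns later.  */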
10942 /* (X & Y) | Y is (X, Y). */
10943 if (TREE_CODE (arg0) == BIT_AND_EXPR
10944 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10945 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10946 /* (X & Y) | X is (Y, X). */
10947 if (TREE_CODE (arg0) == BIT_AND_EXPR
10948 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10949 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10950 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10951 /* X | (X & Y) is (Y, X). */
10952 if (TREE_CODE (arg1) == BIT_AND_EXPR
10953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10954 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10955 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10956 /* X | (Y & X) is (Y, X). */
10957 if (TREE_CODE (arg1) == BIT_AND_EXPR
10958 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10959 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10960 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10962 t1 = distribute_bit_expr (code, type, arg0, arg1);
10963 if (t1 != NULL_TREE)
10964 return t1;
10966 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10968 This results in more efficient code for machines without a NAND
10969 instruction. Combine will canonicalize to the first form
10970 which will allow use of NAND instructions provided by the
10971 backend if they exist. */
10972 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10973 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10975 return fold_build1 (BIT_NOT_EXPR, type,
10976 build2 (BIT_AND_EXPR, type,
10977 fold_convert (type,
10978 TREE_OPERAND (arg0, 0)),
10979 fold_convert (type,
10980 TREE_OPERAND (arg1, 0))));
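/* [Editorial illustration.]  De Morgan in tree form: the fold above
   uses ~a | ~b == ~(a & b), so e.g. (hypothetical function)

       int f (int a, int b) { return ~a | ~b; }
       ==>  int f (int a, int b) { return ~(a & b); }

   which is a single NAND on targets that provide one.  */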
10983 /* See if this can be simplified into a rotate first. If that
10984 is unsuccessful continue in the association code. */
10985 goto bit_rotate;
10987 case BIT_XOR_EXPR:
10988 if (integer_zerop (arg1))
10989 return non_lvalue (fold_convert (type, arg0));
10990 if (integer_all_onesp (arg1))
10991 return fold_build1 (BIT_NOT_EXPR, type, op0);
10992 if (operand_equal_p (arg0, arg1, 0))
10993 return omit_one_operand (type, integer_zero_node, arg0);
10995 /* ~X ^ X is -1. */
10996 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10997 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10999 t1 = fold_convert (type, integer_zero_node);
11000 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11001 return omit_one_operand (type, t1, arg1);
11004 /* X ^ ~X is -1. */
11005 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11006 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11008 t1 = fold_convert (type, integer_zero_node);
11009 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11010 return omit_one_operand (type, t1, arg0);
11013 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11014 with a constant, and the two constants have no bits in common,
11015 we should treat this as a BIT_IOR_EXPR since this may produce more
11016 simplifications. */
11017 if (TREE_CODE (arg0) == BIT_AND_EXPR
11018 && TREE_CODE (arg1) == BIT_AND_EXPR
11019 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11020 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11021 && integer_zerop (const_binop (BIT_AND_EXPR,
11022 TREE_OPERAND (arg0, 1),
11023 TREE_OPERAND (arg1, 1), 0)))
11025 code = BIT_IOR_EXPR;
11026 goto bit_ior;
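/* [Editorial illustration.]  When the two AND masks share no bits,
   the XOR never sees two 1-bits in the same position, so it behaves
   exactly like an IOR:

       (a & 0x0f) ^ (b & 0xf0)  ==  (a & 0x0f) | (b & 0xf0)

   Retrying the expression as BIT_IOR_EXPR exposes the IOR-specific
   folds above.  */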
11030 /* (X | Y) ^ X -> Y & ~X. */
11030 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11031 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11033 tree t2 = TREE_OPERAND (arg0, 1);
11034 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11035 arg1);
11036 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11037 fold_convert (type, t1));
11038 return t1;
11041 /* (Y | X) ^ X -> Y & ~X. */
11042 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11043 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11045 tree t2 = TREE_OPERAND (arg0, 0);
11046 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11047 arg1);
11048 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11049 fold_convert (type, t1));
11050 return t1;
11053 /* X ^ (X | Y) -> Y & ~X. */
11054 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11055 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11057 tree t2 = TREE_OPERAND (arg1, 1);
11058 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11059 arg0);
11060 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11061 fold_convert (type, t1));
11062 return t1;
11065 /* X ^ (Y | X) -> Y & ~X. */
11066 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11067 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11069 tree t2 = TREE_OPERAND (arg1, 0);
11070 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11071 arg0);
11072 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11073 fold_convert (type, t1));
11074 return t1;
11077 /* Convert ~X ^ ~Y to X ^ Y. */
11078 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11079 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11080 return fold_build2 (code, type,
11081 fold_convert (type, TREE_OPERAND (arg0, 0)),
11082 fold_convert (type, TREE_OPERAND (arg1, 0)));
11084 /* Convert ~X ^ C to X ^ ~C. */
11085 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11086 && TREE_CODE (arg1) == INTEGER_CST)
11087 return fold_build2 (code, type,
11088 fold_convert (type, TREE_OPERAND (arg0, 0)),
11089 fold_build1 (BIT_NOT_EXPR, type, arg1));
11091 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11092 if (TREE_CODE (arg0) == BIT_AND_EXPR
11093 && integer_onep (TREE_OPERAND (arg0, 1))
11094 && integer_onep (arg1))
11095 return fold_build2 (EQ_EXPR, type, arg0,
11096 build_int_cst (TREE_TYPE (arg0), 0));
11098 /* Fold (X & Y) ^ Y as ~X & Y. */
11099 if (TREE_CODE (arg0) == BIT_AND_EXPR
11100 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11102 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11103 return fold_build2 (BIT_AND_EXPR, type,
11104 fold_build1 (BIT_NOT_EXPR, type, tem),
11105 fold_convert (type, arg1));
11107 /* Fold (X & Y) ^ X as ~Y & X. */
11108 if (TREE_CODE (arg0) == BIT_AND_EXPR
11109 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11110 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11112 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11113 return fold_build2 (BIT_AND_EXPR, type,
11114 fold_build1 (BIT_NOT_EXPR, type, tem),
11115 fold_convert (type, arg1));
11117 /* Fold X ^ (X & Y) as X & ~Y. */
11118 if (TREE_CODE (arg1) == BIT_AND_EXPR
11119 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11121 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11122 return fold_build2 (BIT_AND_EXPR, type,
11123 fold_convert (type, arg0),
11124 fold_build1 (BIT_NOT_EXPR, type, tem));
11126 /* Fold X ^ (Y & X) as ~Y & X. */
11127 if (TREE_CODE (arg1) == BIT_AND_EXPR
11128 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11129 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11131 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11132 return fold_build2 (BIT_AND_EXPR, type,
11133 fold_build1 (BIT_NOT_EXPR, type, tem),
11134 fold_convert (type, arg0));
11137 /* See if this can be simplified into a rotate first. If that
11138 is unsuccessful continue in the association code. */
11139 goto bit_rotate;
11141 case BIT_AND_EXPR:
11142 if (integer_all_onesp (arg1))
11143 return non_lvalue (fold_convert (type, arg0));
11144 if (integer_zerop (arg1))
11145 return omit_one_operand (type, arg1, arg0);
11146 if (operand_equal_p (arg0, arg1, 0))
11147 return non_lvalue (fold_convert (type, arg0));
11149 /* ~X & X is always zero. */
11150 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11151 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11152 return omit_one_operand (type, integer_zero_node, arg1);
11154 /* X & ~X is always zero. */
11155 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11156 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11157 return omit_one_operand (type, integer_zero_node, arg0);
11159 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11160 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11161 && TREE_CODE (arg1) == INTEGER_CST
11162 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11164 tree tmp1 = fold_convert (type, arg1);
11165 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11166 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11167 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11168 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11169 return fold_convert (type,
11170 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11173 /* (X | Y) & Y is (X, Y). */
11174 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11175 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11176 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11177 /* (X | Y) & X is (Y, X). */
11178 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11179 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11180 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11181 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11182 /* X & (X | Y) is (Y, X). */
11183 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11184 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11185 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11186 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11187 /* X & (Y | X) is (Y, X). */
11188 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11189 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11190 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11191 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11193 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11194 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11195 && integer_onep (TREE_OPERAND (arg0, 1))
11196 && integer_onep (arg1))
11198 tem = TREE_OPERAND (arg0, 0);
11199 return fold_build2 (EQ_EXPR, type,
11200 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11201 build_int_cst (TREE_TYPE (tem), 1)),
11202 build_int_cst (TREE_TYPE (tem), 0));
11204 /* Fold ~X & 1 as (X & 1) == 0. */
11205 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11206 && integer_onep (arg1))
11208 tem = TREE_OPERAND (arg0, 0);
11209 return fold_build2 (EQ_EXPR, type,
11210 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11211 build_int_cst (TREE_TYPE (tem), 1)),
11212 build_int_cst (TREE_TYPE (tem), 0));
11215 /* Fold (X ^ Y) & Y as ~X & Y. */
11216 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11217 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11219 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11220 return fold_build2 (BIT_AND_EXPR, type,
11221 fold_build1 (BIT_NOT_EXPR, type, tem),
11222 fold_convert (type, arg1));
11224 /* Fold (X ^ Y) & X as ~Y & X. */
11225 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11226 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11227 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11229 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11230 return fold_build2 (BIT_AND_EXPR, type,
11231 fold_build1 (BIT_NOT_EXPR, type, tem),
11232 fold_convert (type, arg1));
11234 /* Fold X & (X ^ Y) as X & ~Y. */
11235 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11236 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11238 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11239 return fold_build2 (BIT_AND_EXPR, type,
11240 fold_convert (type, arg0),
11241 fold_build1 (BIT_NOT_EXPR, type, tem));
11243 /* Fold X & (Y ^ X) as ~Y & X. */
11244 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11245 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11246 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11248 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11249 return fold_build2 (BIT_AND_EXPR, type,
11250 fold_build1 (BIT_NOT_EXPR, type, tem),
11251 fold_convert (type, arg0));
11254 t1 = distribute_bit_expr (code, type, arg0, arg1);
11255 if (t1 != NULL_TREE)
11256 return t1;
11257 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11258 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11259 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11261 unsigned int prec
11262 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11264 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11265 && (~TREE_INT_CST_LOW (arg1)
11266 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11267 return fold_convert (type, TREE_OPERAND (arg0, 0));
11270 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11272 This results in more efficient code for machines without a NOR
11273 instruction. Combine will canonicalize to the first form
11274 which will allow use of NOR instructions provided by the
11275 backend if they exist. */
11276 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11277 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11279 return fold_build1 (BIT_NOT_EXPR, type,
11280 build2 (BIT_IOR_EXPR, type,
11281 fold_convert (type,
11282 TREE_OPERAND (arg0, 0)),
11283 fold_convert (type,
11284 TREE_OPERAND (arg1, 0))));
11287 /* If arg0 is derived from the address of an object or function, we may
11288 be able to fold this expression using the object or function's
11289 alignment. */
11290 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11292 unsigned HOST_WIDE_INT modulus, residue;
11293 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11295 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11296 integer_onep (arg1));
11298 /* This works because modulus is a power of 2. If this weren't the
11299 case, we'd have to replace it by its greatest power-of-2
11300 divisor: modulus & -modulus. */
11301 if (low < modulus)
11302 return build_int_cst (type, residue & low);
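/* [Editorial illustration -- not part of fold-const.c.]  Conceptually,
   given

       static int buf[16] __attribute__ ((aligned (8)));

   an alignment test such as ((uintptr_t) buf & 7) can fold to 0:
   get_pointer_modulus_and_residue reports modulus == 8 and
   residue == 0, and the mask 7 < 8 selects only bits the alignment
   pins.  (At this point in fold the AND is still on a pointer-typed
   tree; the uintptr_t cast is just the nearest C spelling.)  */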
11305 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11306 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11307 if the new mask might be further optimized. */
11308 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11309 || TREE_CODE (arg0) == RSHIFT_EXPR)
11310 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11311 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11312 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11313 < TYPE_PRECISION (TREE_TYPE (arg0))
11314 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11315 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11317 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11318 unsigned HOST_WIDE_INT mask
11319 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11320 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11321 tree shift_type = TREE_TYPE (arg0);
11323 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11324 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11325 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11326 && TYPE_PRECISION (TREE_TYPE (arg0))
11327 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11329 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11330 tree arg00 = TREE_OPERAND (arg0, 0);
11331 /* See if more bits can be proven as zero because of
11332 zero extension. */
11333 if (TREE_CODE (arg00) == NOP_EXPR
11334 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11336 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11337 if (TYPE_PRECISION (inner_type)
11338 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11339 && TYPE_PRECISION (inner_type) < prec)
11341 prec = TYPE_PRECISION (inner_type);
11342 /* See if we can shorten the right shift. */
11343 if (shiftc < prec)
11344 shift_type = inner_type;
11347 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11348 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11349 zerobits <<= prec - shiftc;
11350 /* For an arithmetic shift, if the sign bit could be set, zerobits
11351 can actually contain sign bits, so no transformation is
11352 possible unless MASK masks them all away. In that
11353 case the shift needs to be converted into a logical shift. */
11354 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11355 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11357 if ((mask & zerobits) == 0)
11358 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11359 else
11360 zerobits = 0;
11364 /* ((X << 16) & 0xff00) is (X, 0). */
11365 if ((mask & zerobits) == mask)
11366 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11368 newmask = mask | zerobits;
11369 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11371 unsigned int prec;
11373 /* Only do the transformation if NEWMASK is some integer
11374 mode's mask. */
11375 for (prec = BITS_PER_UNIT;
11376 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11377 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11378 break;
11379 if (prec < HOST_BITS_PER_WIDE_INT
11380 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11382 if (shift_type != TREE_TYPE (arg0))
11384 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11385 fold_convert (shift_type,
11386 TREE_OPERAND (arg0, 0)),
11387 TREE_OPERAND (arg0, 1));
11388 tem = fold_convert (type, tem);
11390 else
11391 tem = op0;
11392 return fold_build2 (BIT_AND_EXPR, type, tem,
11393 build_int_cst_type (TREE_TYPE (op1),
11394 newmask));
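/* [Editorial illustration.]  The shift pins some result bits at
   zero, so the AND mask can be widened (or the whole AND removed)
   for free.  For a 32-bit unsigned x:

       (x << 16) & 0xff00  ==>  0         every mask bit is a zero bit
       (x >> 24) & 0xff    ==>  x >> 24   newmask becomes ~0 for the
                                          32-bit type, and the all-ones
                                          AND is then dropped above

   The rewrite only fires when the widened mask is a natural mode
   mask, since those match later patterns best.  */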
11399 goto associate;
11401 case RDIV_EXPR:
11402 /* Don't touch a floating-point divide by zero unless the mode
11403 of the constant can represent infinity. */
11404 if (TREE_CODE (arg1) == REAL_CST
11405 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11406 && real_zerop (arg1))
11407 return NULL_TREE;
11409 /* Optimize A / A to 1.0 if we don't care about
11410 NaNs or Infinities. Skip the transformation
11411 for non-real operands. */
11412 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11413 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11414 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11415 && operand_equal_p (arg0, arg1, 0))
11417 tree r = build_real (TREE_TYPE (arg0), dconst1);
11419 return omit_two_operands (type, r, arg0, arg1);
11422 /* The complex version of the above A / A optimization. */
11423 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11424 && operand_equal_p (arg0, arg1, 0))
11426 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11427 if (! HONOR_NANS (TYPE_MODE (elem_type))
11428 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11430 tree r = build_real (elem_type, dconst1);
11431 /* omit_two_operands will call fold_convert for us. */
11432 return omit_two_operands (type, r, arg0, arg1);
11436 /* (-A) / (-B) -> A / B */
11437 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11438 return fold_build2 (RDIV_EXPR, type,
11439 TREE_OPERAND (arg0, 0),
11440 negate_expr (arg1));
11441 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11442 return fold_build2 (RDIV_EXPR, type,
11443 negate_expr (arg0),
11444 TREE_OPERAND (arg1, 0));
11446 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11447 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11448 && real_onep (arg1))
11449 return non_lvalue (fold_convert (type, arg0));
11451 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11452 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11453 && real_minus_onep (arg1))
11454 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11456 /* If ARG1 is a constant, we can convert this to a multiply by the
11457 reciprocal. This does not have the same rounding properties,
11458 so only do this if -freciprocal-math. We can actually
11459 always safely do it if ARG1 is a power of two, but it's hard to
11460 tell if it is or not in a portable manner. */
11461 if (TREE_CODE (arg1) == REAL_CST)
11463 if (flag_reciprocal_math
11464 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11465 arg1, 0)))
11466 return fold_build2 (MULT_EXPR, type, arg0, tem);
11467 /* Find the reciprocal if optimizing and the result is exact. */
11468 if (optimize)
11470 REAL_VALUE_TYPE r;
11471 r = TREE_REAL_CST (arg1);
11472 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11474 tem = build_real (type, r);
11475 return fold_build2 (MULT_EXPR, type,
11476 fold_convert (type, arg0), tem);
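/* [Editorial illustration.]  With -freciprocal-math, x / 10.0 is
   rewritten as x * 0.1, trading a division for a multiplication at
   the cost of one rounding difference.  Without the flag the fold
   still fires when the reciprocal is exact, e.g. x / 8.0 ==>
   x * 0.125, which is bit-identical in binary floating point.  */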
11480 /* Convert A/B/C to A/(B*C). */
11481 if (flag_reciprocal_math
11482 && TREE_CODE (arg0) == RDIV_EXPR)
11483 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11484 fold_build2 (MULT_EXPR, type,
11485 TREE_OPERAND (arg0, 1), arg1));
11487 /* Convert A/(B/C) to (A/B)*C. */
11488 if (flag_reciprocal_math
11489 && TREE_CODE (arg1) == RDIV_EXPR)
11490 return fold_build2 (MULT_EXPR, type,
11491 fold_build2 (RDIV_EXPR, type, arg0,
11492 TREE_OPERAND (arg1, 0)),
11493 TREE_OPERAND (arg1, 1));
11495 /* Convert C1/(X*C2) into (C1/C2)/X. */
11496 if (flag_reciprocal_math
11497 && TREE_CODE (arg1) == MULT_EXPR
11498 && TREE_CODE (arg0) == REAL_CST
11499 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11501 tree tem = const_binop (RDIV_EXPR, arg0,
11502 TREE_OPERAND (arg1, 1), 0);
11503 if (tem)
11504 return fold_build2 (RDIV_EXPR, type, tem,
11505 TREE_OPERAND (arg1, 0));
11508 if (flag_unsafe_math_optimizations)
11510 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11511 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11513 /* Optimize sin(x)/cos(x) as tan(x). */
11514 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11515 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11516 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11517 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11518 CALL_EXPR_ARG (arg1, 0), 0))
11520 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11522 if (tanfn != NULL_TREE)
11523 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11526 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11527 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11528 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11529 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11530 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11531 CALL_EXPR_ARG (arg1, 0), 0))
11533 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11535 if (tanfn != NULL_TREE)
11537 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11538 return fold_build2 (RDIV_EXPR, type,
11539 build_real (type, dconst1), tmp);
11543 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11544 NaNs or Infinities. */
11545 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11546 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11547 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11549 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11550 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11552 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11553 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11554 && operand_equal_p (arg00, arg01, 0))
11556 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11558 if (cosfn != NULL_TREE)
11559 return build_call_expr (cosfn, 1, arg00);
11563 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11564 NaNs or Infinities. */
11565 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11566 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11567 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11569 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11570 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11572 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11573 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11574 && operand_equal_p (arg00, arg01, 0))
11576 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11578 if (cosfn != NULL_TREE)
11580 tree tmp = build_call_expr (cosfn, 1, arg00);
11581 return fold_build2 (RDIV_EXPR, type,
11582 build_real (type, dconst1),
11583 tmp);
11588 /* Optimize pow(x,c)/x as pow(x,c-1). */
11589 if (fcode0 == BUILT_IN_POW
11590 || fcode0 == BUILT_IN_POWF
11591 || fcode0 == BUILT_IN_POWL)
11593 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11594 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11595 if (TREE_CODE (arg01) == REAL_CST
11596 && !TREE_OVERFLOW (arg01)
11597 && operand_equal_p (arg1, arg00, 0))
11599 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11600 REAL_VALUE_TYPE c;
11601 tree arg;
11603 c = TREE_REAL_CST (arg01);
11604 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11605 arg = build_real (type, c);
11606 return build_call_expr (powfn, 2, arg1, arg);
11610 /* Optimize a/root(b/c) into a*root(c/b). */
11611 if (BUILTIN_ROOT_P (fcode1))
11613 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11615 if (TREE_CODE (rootarg) == RDIV_EXPR)
11617 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11618 tree b = TREE_OPERAND (rootarg, 0);
11619 tree c = TREE_OPERAND (rootarg, 1);
11621 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11623 tmp = build_call_expr (rootfn, 1, tmp);
11624 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11628 /* Optimize x/expN(y) into x*expN(-y). */
11629 if (BUILTIN_EXPONENT_P (fcode1))
11631 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11632 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11633 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11634 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11637 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11638 if (fcode1 == BUILT_IN_POW
11639 || fcode1 == BUILT_IN_POWF
11640 || fcode1 == BUILT_IN_POWL)
11642 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11643 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11644 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11645 tree neg11 = fold_convert (type, negate_expr (arg11));
11646 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11647 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11650 return NULL_TREE;
11652 case TRUNC_DIV_EXPR:
11653 case FLOOR_DIV_EXPR:
11654 /* Simplify A / (B << N) where A and B are positive and B is
11655 a power of 2, to A >> (N + log2(B)). */
11656 strict_overflow_p = false;
11657 if (TREE_CODE (arg1) == LSHIFT_EXPR
11658 && (TYPE_UNSIGNED (type)
11659 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11661 tree sval = TREE_OPERAND (arg1, 0);
11662 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11664 tree sh_cnt = TREE_OPERAND (arg1, 1);
11665 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11667 if (strict_overflow_p)
11668 fold_overflow_warning (("assuming signed overflow does not "
11669 "occur when simplifying A / (B << N)"),
11670 WARN_STRICT_OVERFLOW_MISC);
11672 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11673 sh_cnt, build_int_cst (NULL_TREE, pow2));
11674 return fold_build2 (RSHIFT_EXPR, type,
11675 fold_convert (type, arg0), sh_cnt);
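/* [Editorial illustration.]  For an unsigned (or provably
   nonnegative) dividend, division by B << N with B a power of two is
   a right shift (function name hypothetical):

       unsigned f (unsigned a, int n) { return a / (4u << n); }
       ==>  unsigned f (unsigned a, int n) { return a >> (n + 2); }

   since exact_log2 (4) == 2.  */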
11679 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11680 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11681 if (INTEGRAL_TYPE_P (type)
11682 && TYPE_UNSIGNED (type)
11683 && code == FLOOR_DIV_EXPR)
11684 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11686 /* Fall through. */
11688 case ROUND_DIV_EXPR:
11689 case CEIL_DIV_EXPR:
11690 case EXACT_DIV_EXPR:
11691 if (integer_onep (arg1))
11692 return non_lvalue (fold_convert (type, arg0));
11693 if (integer_zerop (arg1))
11694 return NULL_TREE;
11695 /* X / -1 is -X. */
11696 if (!TYPE_UNSIGNED (type)
11697 && TREE_CODE (arg1) == INTEGER_CST
11698 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11699 && TREE_INT_CST_HIGH (arg1) == -1)
11700 return fold_convert (type, negate_expr (arg0));
11702 /* Convert -A / -B to A / B when the type is signed and overflow is
11703 undefined. */
11704 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11705 && TREE_CODE (arg0) == NEGATE_EXPR
11706 && negate_expr_p (arg1))
11708 if (INTEGRAL_TYPE_P (type))
11709 fold_overflow_warning (("assuming signed overflow does not occur "
11710 "when distributing negation across "
11711 "division"),
11712 WARN_STRICT_OVERFLOW_MISC);
11713 return fold_build2 (code, type,
11714 fold_convert (type, TREE_OPERAND (arg0, 0)),
11715 fold_convert (type, negate_expr (arg1)));
11717 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11718 && TREE_CODE (arg1) == NEGATE_EXPR
11719 && negate_expr_p (arg0))
11721 if (INTEGRAL_TYPE_P (type))
11722 fold_overflow_warning (("assuming signed overflow does not occur "
11723 "when distributing negation across "
11724 "division"),
11725 WARN_STRICT_OVERFLOW_MISC);
11726 return fold_build2 (code, type,
11727 fold_convert (type, negate_expr (arg0)),
11728 fold_convert (type, TREE_OPERAND (arg1, 0)));
11731 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11732 operation, EXACT_DIV_EXPR.
11734 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11735 At one time others generated faster code; it is not clear whether they
11736 still do after the last round of changes to the DIV code in expmed.c. */
11737 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11738 && multiple_of_p (type, arg0, arg1))
11739 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11741 strict_overflow_p = false;
11742 if (TREE_CODE (arg1) == INTEGER_CST
11743 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11744 &strict_overflow_p)))
11746 if (strict_overflow_p)
11747 fold_overflow_warning (("assuming signed overflow does not occur "
11748 "when simplifying division"),
11749 WARN_STRICT_OVERFLOW_MISC);
11750 return fold_convert (type, tem);
11753 return NULL_TREE;
11755 case CEIL_MOD_EXPR:
11756 case FLOOR_MOD_EXPR:
11757 case ROUND_MOD_EXPR:
11758 case TRUNC_MOD_EXPR:
11759 /* X % 1 is always zero, but be sure to preserve any side
11760 effects in X. */
11761 if (integer_onep (arg1))
11762 return omit_one_operand (type, integer_zero_node, arg0);
11764 /* For X % 0, return X % 0 unchanged so that we get the
11765 proper warnings and errors. */
11766 if (integer_zerop (arg1))
11767 return NULL_TREE;
11769 /* 0 % X is always zero, but be sure to preserve any side
11770 effects in X. Place this after checking for X == 0. */
11771 if (integer_zerop (arg0))
11772 return omit_one_operand (type, integer_zero_node, arg1);
11774 /* X % -1 is zero. */
11775 if (!TYPE_UNSIGNED (type)
11776 && TREE_CODE (arg1) == INTEGER_CST
11777 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11778 && TREE_INT_CST_HIGH (arg1) == -1)
11779 return omit_one_operand (type, integer_zero_node, arg0);
11781 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11782 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11783 strict_overflow_p = false;
11784 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11785 && (TYPE_UNSIGNED (type)
11786 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11788 tree c = arg1;
11789 /* Also optimize A % (C << N) where C is a power of 2,
11790 to A & ((C << N) - 1). */
11791 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11792 c = TREE_OPERAND (arg1, 0);
11794 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11796 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11797 build_int_cst (TREE_TYPE (arg1), 1));
11798 if (strict_overflow_p)
11799 fold_overflow_warning (("assuming signed overflow does not "
11800 "occur when simplifying "
11801 "X % (power of two)"),
11802 WARN_STRICT_OVERFLOW_MISC);
11803 return fold_build2 (BIT_AND_EXPR, type,
11804 fold_convert (type, arg0),
11805 fold_convert (type, mask));
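/* [Editorial illustration.]  The classic strength reduction: for an
   unsigned (or provably nonnegative) X,

       x % 8          ==>  x & 7
       x % (2u << n)  ==>  x & ((2u << n) - 1)

   The second form is why C above may be the operand of an
   LSHIFT_EXPR rather than arg1 itself.  */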
11809 /* X % -C is the same as X % C. */
11810 if (code == TRUNC_MOD_EXPR
11811 && !TYPE_UNSIGNED (type)
11812 && TREE_CODE (arg1) == INTEGER_CST
11813 && !TREE_OVERFLOW (arg1)
11814 && TREE_INT_CST_HIGH (arg1) < 0
11815 && !TYPE_OVERFLOW_TRAPS (type)
11816 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11817 && !sign_bit_p (arg1, arg1))
11818 return fold_build2 (code, type, fold_convert (type, arg0),
11819 fold_convert (type, negate_expr (arg1)));
11821 /* X % -Y is the same as X % Y. */
11822 if (code == TRUNC_MOD_EXPR
11823 && !TYPE_UNSIGNED (type)
11824 && TREE_CODE (arg1) == NEGATE_EXPR
11825 && !TYPE_OVERFLOW_TRAPS (type))
11826 return fold_build2 (code, type, fold_convert (type, arg0),
11827 fold_convert (type, TREE_OPERAND (arg1, 0)));
11829 if (TREE_CODE (arg1) == INTEGER_CST
11830 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11831 &strict_overflow_p)))
11833 if (strict_overflow_p)
11834 fold_overflow_warning (("assuming signed overflow does not occur "
11835 "when simplifying modulus"),
11836 WARN_STRICT_OVERFLOW_MISC);
11837 return fold_convert (type, tem);
11840 return NULL_TREE;
11842 case LROTATE_EXPR:
11843 case RROTATE_EXPR:
11844 if (integer_all_onesp (arg0))
11845 return omit_one_operand (type, arg0, arg1);
11846 goto shift;
11848 case RSHIFT_EXPR:
11849 /* Optimize -1 >> x for arithmetic right shifts. */
11850 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11851 && tree_expr_nonnegative_p (arg1))
11852 return omit_one_operand (type, arg0, arg1);
11853 /* ... fall through ... */
11855 case LSHIFT_EXPR:
11856 shift:
11857 if (integer_zerop (arg1))
11858 return non_lvalue (fold_convert (type, arg0));
11859 if (integer_zerop (arg0))
11860 return omit_one_operand (type, arg0, arg1);
11862 /* Since a negative shift count is not well-defined,
11863 don't try to compute it in the compiler. */
11864 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11865 return NULL_TREE;
11867 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11868 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11869 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11870 && host_integerp (TREE_OPERAND (arg0, 1), false)
11871 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11873 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11874 + TREE_INT_CST_LOW (arg1));
11876 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11877 being well defined. */
11878 if (low >= TYPE_PRECISION (type))
11880 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11881 low = low % TYPE_PRECISION (type);
11882 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11883 return build_int_cst (type, 0);
11884 else
11885 low = TYPE_PRECISION (type) - 1;
11888 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11889 build_int_cst (type, low));
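/* [Editorial illustration.]  (x >> 3) >> 5 simply becomes x >> 8.
   The clamping above handles c1 + c2 >= precision, where a single
   combined shift would be undefined; for a 32-bit int x:

       (x >> 20) >> 20  ==>  x >> 31   arithmetic shift keeps the sign
       (x << 20) << 20  ==>  0

   and for rotates the combined count simply wraps modulo the
   precision.  */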
11892 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11893 into x & ((unsigned)-1 >> c) for unsigned types. */
11894 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11895 || (TYPE_UNSIGNED (type)
11896 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11897 && host_integerp (arg1, false)
11898 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11899 && host_integerp (TREE_OPERAND (arg0, 1), false)
11900 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11902 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11903 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11904 tree lshift;
11905 tree arg00;
11907 if (low0 == low1)
11909 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11911 lshift = build_int_cst (type, -1);
11912 lshift = int_const_binop (code, lshift, arg1, 0);
11914 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11918 /* Rewrite an LROTATE_EXPR by a constant into an
11919 RROTATE_EXPR by a new constant. */
11920 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11922 tree tem = build_int_cst (TREE_TYPE (arg1),
11923 TYPE_PRECISION (type));
11924 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11925 return fold_build2 (RROTATE_EXPR, type, op0, tem);
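/* [Editorial illustration.]  Rotating left by C equals rotating
   right by PREC - C, so only one canonical direction needs to be
   pattern-matched downstream; for a 32-bit value,

       rotl (x, 8)  ==>  rotr (x, 24)

   where rotl/rotr stand for LROTATE_EXPR/RROTATE_EXPR trees (C has
   no rotate operator; these trees come from idiom recognition or
   builtins).  */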
11928 /* If we have a rotate of a bit operation with the rotate count and
11929 the second operand of the bit operation both constant,
11930 permute the two operations. */
11931 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11932 && (TREE_CODE (arg0) == BIT_AND_EXPR
11933 || TREE_CODE (arg0) == BIT_IOR_EXPR
11934 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11936 return fold_build2 (TREE_CODE (arg0), type,
11937 fold_build2 (code, type,
11938 TREE_OPERAND (arg0, 0), arg1),
11939 fold_build2 (code, type,
11940 TREE_OPERAND (arg0, 1), arg1));
11942 /* Two consecutive rotates adding up to the precision of the
11943 type can be ignored. */
11944 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11945 && TREE_CODE (arg0) == RROTATE_EXPR
11946 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11947 && TREE_INT_CST_HIGH (arg1) == 0
11948 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11949 && ((TREE_INT_CST_LOW (arg1)
11950 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11951 == (unsigned int) TYPE_PRECISION (type)))
11952 return TREE_OPERAND (arg0, 0);
11954 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11955 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11956 if the latter can be further optimized. */
11957 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11958 && TREE_CODE (arg0) == BIT_AND_EXPR
11959 && TREE_CODE (arg1) == INTEGER_CST
11960 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11962 tree mask = fold_build2 (code, type,
11963 fold_convert (type, TREE_OPERAND (arg0, 1)),
11964 arg1);
11965 tree shift = fold_build2 (code, type,
11966 fold_convert (type, TREE_OPERAND (arg0, 0)),
11967 arg1);
11968 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11969 if (tem)
11970 return tem;
11973 return NULL_TREE;
11975 case MIN_EXPR:
11976 if (operand_equal_p (arg0, arg1, 0))
11977 return omit_one_operand (type, arg0, arg1);
11978 if (INTEGRAL_TYPE_P (type)
11979 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11980 return omit_one_operand (type, arg1, arg0);
11981 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11982 if (tem)
11983 return tem;
11984 goto associate;
11986 case MAX_EXPR:
11987 if (operand_equal_p (arg0, arg1, 0))
11988 return omit_one_operand (type, arg0, arg1);
11989 if (INTEGRAL_TYPE_P (type)
11990 && TYPE_MAX_VALUE (type)
11991 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11992 return omit_one_operand (type, arg1, arg0);
11993 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11994 if (tem)
11995 return tem;
11996 goto associate;
11998 case TRUTH_ANDIF_EXPR:
11999 /* Note that the operands of this must be ints
12000 and their values must be 0 or 1.
12001 ("true" is a fixed value perhaps depending on the language.) */
12002 /* If first arg is constant zero, return it. */
12003 if (integer_zerop (arg0))
12004 return fold_convert (type, arg0);
12005 case TRUTH_AND_EXPR:
12006 /* If either arg is constant true, drop it. */
12007 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12008 return non_lvalue (fold_convert (type, arg1));
12009 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12010 /* Preserve sequence points. */
12011 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12012 return non_lvalue (fold_convert (type, arg0));
12013 /* If second arg is constant zero, result is zero, but first arg
12014 must be evaluated. */
12015 if (integer_zerop (arg1))
12016 return omit_one_operand (type, arg1, arg0);
12017 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12018 case will be handled here. */
12019 if (integer_zerop (arg0))
12020 return omit_one_operand (type, arg0, arg1);
12022 /* !X && X is always false. */
12023 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12024 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12025 return omit_one_operand (type, integer_zero_node, arg1);
12026 /* X && !X is always false. */
12027 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12028 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12029 return omit_one_operand (type, integer_zero_node, arg0);
12031 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12032 means A >= Y && A != MAX, but in this case we know that
12033 A < X <= MAX. */
12035 if (!TREE_SIDE_EFFECTS (arg0)
12036 && !TREE_SIDE_EFFECTS (arg1))
12038 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12039 if (tem && !operand_equal_p (tem, arg0, 0))
12040 return fold_build2 (code, type, tem, arg1);
12042 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12043 if (tem && !operand_equal_p (tem, arg1, 0))
12044 return fold_build2 (code, type, arg0, tem);
12047 truth_andor:
12048 /* We only do these simplifications if we are optimizing. */
12049 if (!optimize)
12050 return NULL_TREE;
12052 /* Check for things like (A || B) && (A || C). We can convert this
12053 to A || (B && C). Note that either operator can be any of the four
12054 truth and/or operations and the transformation will still be
12055 valid. Also note that we only care about order for the
12056 ANDIF and ORIF operators. If B contains side effects, this
12057 might change the truth-value of A. */
12058 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12059 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12060 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12061 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12062 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12063 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12065 tree a00 = TREE_OPERAND (arg0, 0);
12066 tree a01 = TREE_OPERAND (arg0, 1);
12067 tree a10 = TREE_OPERAND (arg1, 0);
12068 tree a11 = TREE_OPERAND (arg1, 1);
12069 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12070 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12071 && (code == TRUTH_AND_EXPR
12072 || code == TRUTH_OR_EXPR));
12074 if (operand_equal_p (a00, a10, 0))
12075 return fold_build2 (TREE_CODE (arg0), type, a00,
12076 fold_build2 (code, type, a01, a11));
12077 else if (commutative && operand_equal_p (a00, a11, 0))
12078 return fold_build2 (TREE_CODE (arg0), type, a00,
12079 fold_build2 (code, type, a01, a10));
12080 else if (commutative && operand_equal_p (a01, a10, 0))
12081 return fold_build2 (TREE_CODE (arg0), type, a01,
12082 fold_build2 (code, type, a00, a11));
12084 /* This case is tricky because we must either have commutative
12085 operators or else A10 must not have side-effects. */
12087 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12088 && operand_equal_p (a01, a11, 0))
12089 return fold_build2 (TREE_CODE (arg0), type,
12090 fold_build2 (code, type, a00, a10),
12091 a01);
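/* [Editorial illustration.]  The factoring above is ordinary boolean
   distribution, e.g.

       (a || b) && (a || c)  ==>  a || (b && c)

   so A is evaluated once instead of twice.  The TREE_SIDE_EFFECTS
   and `commutative' checks exist because for ANDIF/ORIF the
   evaluation order is observable: an operand may only be moved when
   doing so cannot change which side effects run.  */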
12094 /* See if we can build a range comparison. */
12095 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12096 return tem;
12098 /* Check for the possibility of merging component references. If our
12099 lhs is another similar operation, try to merge its rhs with our
12100 rhs. Then try to merge our lhs and rhs. */
12101 if (TREE_CODE (arg0) == code
12102 && 0 != (tem = fold_truthop (code, type,
12103 TREE_OPERAND (arg0, 1), arg1)))
12104 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12106 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12107 return tem;
12109 return NULL_TREE;
12111 case TRUTH_ORIF_EXPR:
12112 /* Note that the operands of this must be ints
12113 and their values must be 0 or true.
12114 ("true" is a fixed value perhaps depending on the language.) */
12115 /* If first arg is constant true, return it. */
12116 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12117 return fold_convert (type, arg0);
12118 case TRUTH_OR_EXPR:
12119 /* If either arg is constant zero, drop it. */
12120 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12121 return non_lvalue (fold_convert (type, arg1));
12122 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12123 /* Preserve sequence points. */
12124 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12125 return non_lvalue (fold_convert (type, arg0));
12126 /* If second arg is constant true, result is true, but we must
12127 evaluate first arg. */
12128 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12129 return omit_one_operand (type, arg1, arg0);
12130 /* Likewise for first arg, but note this only occurs here for
12131 TRUTH_OR_EXPR. */
12132 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12133 return omit_one_operand (type, arg0, arg1);
12135 /* !X || X is always true. */
12136 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12137 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12138 return omit_one_operand (type, integer_one_node, arg1);
12139 /* X || !X is always true. */
12140 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12141 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12142 return omit_one_operand (type, integer_one_node, arg0);
12144 goto truth_andor;
12146 case TRUTH_XOR_EXPR:
12147 /* If the second arg is constant zero, drop it. */
12148 if (integer_zerop (arg1))
12149 return non_lvalue (fold_convert (type, arg0));
12150 /* If the second arg is constant true, this is a logical inversion. */
12151 if (integer_onep (arg1))
12153 /* Only call invert_truthvalue if operand is a truth value. */
12154 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12155 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12156 else
12157 tem = invert_truthvalue (arg0);
12158 return non_lvalue (fold_convert (type, tem));
12160 /* Identical arguments cancel to zero. */
12161 if (operand_equal_p (arg0, arg1, 0))
12162 return omit_one_operand (type, integer_zero_node, arg0);
12164 /* !X ^ X is always true. */
12165 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12166 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12167 return omit_one_operand (type, integer_one_node, arg1);
12169 /* X ^ !X is always true. */
12170 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12171 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12172 return omit_one_operand (type, integer_one_node, arg0);
12174 return NULL_TREE;
12176 case EQ_EXPR:
12177 case NE_EXPR:
12178 tem = fold_comparison (code, type, op0, op1);
12179 if (tem != NULL_TREE)
12180 return tem;
12182 /* bool_var != 0 becomes bool_var. */
12183 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12184 && code == NE_EXPR)
12185 return non_lvalue (fold_convert (type, arg0));
12187 /* bool_var == 1 becomes bool_var. */
12188 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12189 && code == EQ_EXPR)
12190 return non_lvalue (fold_convert (type, arg0));
12192 /* bool_var != 1 becomes !bool_var. */
12193 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12194 && code == NE_EXPR)
12195 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12197 /* bool_var == 0 becomes !bool_var. */
12198 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12199 && code == EQ_EXPR)
12200 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12202 /* If this is an equality comparison of the address of two non-weak,
12203 unaliased symbols neither of which are extern (since we do not
12204 have access to attributes for externs), then we know the result. */
12205 if (TREE_CODE (arg0) == ADDR_EXPR
12206 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12207 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12208 && ! lookup_attribute ("alias",
12209 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12210 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12211 && TREE_CODE (arg1) == ADDR_EXPR
12212 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12213 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12214 && ! lookup_attribute ("alias",
12215 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12216 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12218 /* We know that we're looking at the address of two
12219 non-weak, unaliased, static _DECL nodes.
12221 It is both wasteful and incorrect to call operand_equal_p
12222 to compare the two ADDR_EXPR nodes. It is wasteful in that
12223 all we need to do is test pointer equality for the arguments
12224 to the two ADDR_EXPR nodes. It is incorrect to use
12225 operand_equal_p as that function is NOT equivalent to a
12226 C equality test. It can in fact return false for two
12227 objects which would test as equal using the C equality
12228 operator. */
12229 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12230 return constant_boolean_node (equal
12231 ? code == EQ_EXPR : code != EQ_EXPR,
12232 type);
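/* [Editorial illustration.]  Given  static int a, b;  the folder can
   decide &a == &b (false) and &a == &a (true) at compile time: both
   operands are non-weak, non-aliased, non-extern decls, so pointer
   identity of the _DECL nodes answers the question exactly.  */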
12235 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12236 a MINUS_EXPR of a constant, we can convert it into a comparison with
12237 a revised constant as long as no overflow occurs. */
12238 if (TREE_CODE (arg1) == INTEGER_CST
12239 && (TREE_CODE (arg0) == PLUS_EXPR
12240 || TREE_CODE (arg0) == MINUS_EXPR)
12241 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12242 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12243 ? MINUS_EXPR : PLUS_EXPR,
12244 fold_convert (TREE_TYPE (arg0), arg1),
12245 TREE_OPERAND (arg0, 1), 0))
12246 && !TREE_OVERFLOW (tem))
12247 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12249 /* Similarly for a NEGATE_EXPR. */
12250 if (TREE_CODE (arg0) == NEGATE_EXPR
12251 && TREE_CODE (arg1) == INTEGER_CST
12252 && 0 != (tem = negate_expr (arg1))
12253 && TREE_CODE (tem) == INTEGER_CST
12254 && !TREE_OVERFLOW (tem))
12255 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12257 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12258 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12259 && TREE_CODE (arg1) == INTEGER_CST
12260 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12261 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12262 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12263 fold_convert (TREE_TYPE (arg0), arg1),
12264 TREE_OPERAND (arg0, 1)));
12266 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12267 if ((TREE_CODE (arg0) == PLUS_EXPR
12268 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12269 || TREE_CODE (arg0) == MINUS_EXPR)
12270 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12271 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12272 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12274 tree val = TREE_OPERAND (arg0, 1);
12275 return omit_two_operands (type,
12276 fold_build2 (code, type,
12277 val,
12278 build_int_cst (TREE_TYPE (val),
12279 0)),
12280 TREE_OPERAND (arg0, 0), arg1);
12283 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12284 if (TREE_CODE (arg0) == MINUS_EXPR
12285 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12286 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12287 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12289 return omit_two_operands (type,
12290 code == NE_EXPR
12291 ? boolean_true_node : boolean_false_node,
12292 TREE_OPERAND (arg0, 1), arg1);
12295 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12296 for !=. Don't do this for ordered comparisons due to overflow. */
12297 if (TREE_CODE (arg0) == MINUS_EXPR
12298 && integer_zerop (arg1))
12299 return fold_build2 (code, type,
12300 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12302 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12303 if (TREE_CODE (arg0) == ABS_EXPR
12304 && (integer_zerop (arg1) || real_zerop (arg1)))
12305 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12307 /* If this is an EQ or NE comparison with zero and ARG0 is
12308 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12309 two operations, but the latter can be done in one less insn
12310 on machines that have only two-operand insns or on which a
12311 constant cannot be the first operand. */
12312 if (TREE_CODE (arg0) == BIT_AND_EXPR
12313 && integer_zerop (arg1))
12315 tree arg00 = TREE_OPERAND (arg0, 0);
12316 tree arg01 = TREE_OPERAND (arg0, 1);
12317 if (TREE_CODE (arg00) == LSHIFT_EXPR
12318 && integer_onep (TREE_OPERAND (arg00, 0)))
12320 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12321 arg01, TREE_OPERAND (arg00, 1));
12322 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12323 build_int_cst (TREE_TYPE (arg0), 1));
12324 return fold_build2 (code, type,
12325 fold_convert (TREE_TYPE (arg1), tem), arg1);
12327 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12328 && integer_onep (TREE_OPERAND (arg01, 0)))
12330 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12331 arg00, TREE_OPERAND (arg01, 1));
12332 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12333 build_int_cst (TREE_TYPE (arg0), 1));
12334 return fold_build2 (code, type,
12335 fold_convert (TREE_TYPE (arg1), tem), arg1);
12339 /* If this is an NE or EQ comparison of zero against the result of a
12340 signed MOD operation whose second operand is a power of 2, make
12341 the MOD operation unsigned since it is simpler and equivalent. */
12342 if (integer_zerop (arg1)
12343 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12344 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12345 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12346 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12347 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12348 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12350 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12351 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12352 fold_convert (newtype,
12353 TREE_OPERAND (arg0, 0)),
12354 fold_convert (newtype,
12355 TREE_OPERAND (arg0, 1)));
12357 return fold_build2 (code, type, newmod,
12358 fold_convert (newtype, arg1));
12361 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12362 C1 is a valid shift constant, and C2 is a power of two, i.e.
12363 a single bit. */
12364 if (TREE_CODE (arg0) == BIT_AND_EXPR
12365 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12366 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12367 == INTEGER_CST
12368 && integer_pow2p (TREE_OPERAND (arg0, 1))
12369 && integer_zerop (arg1))
12371 tree itype = TREE_TYPE (arg0);
12372 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12373 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12375 /* Check for a valid shift count. */
12376 if (TREE_INT_CST_HIGH (arg001) == 0
12377 && TREE_INT_CST_LOW (arg001) < prec)
12379 tree arg01 = TREE_OPERAND (arg0, 1);
12380 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12381 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12382 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12383 can be rewritten as (X & (C2 << C1)) != 0. */
12384 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12386 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12387 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12388 return fold_build2 (code, type, tem, arg1);
12390 /* Otherwise, for signed (arithmetic) shifts,
12391 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12392 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12393 else if (!TYPE_UNSIGNED (itype))
12394 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12395 arg000, build_int_cst (itype, 0));
12396 /* Otherwise, for unsigned (logical) shifts,
12397 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12398 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12399 else
12400 return omit_one_operand (type,
12401 code == EQ_EXPR ? integer_one_node
12402 : integer_zero_node,
12403 arg000);
12407 /* If this is an NE comparison of zero with an AND of one, remove the
12408 comparison since the AND will give the correct value. */
12409 if (code == NE_EXPR
12410 && integer_zerop (arg1)
12411 && TREE_CODE (arg0) == BIT_AND_EXPR
12412 && integer_onep (TREE_OPERAND (arg0, 1)))
12413 return fold_convert (type, arg0);
12415 /* If we have (A & C) == C where C is a power of 2, convert this into
12416 (A & C) != 0. Similarly for NE_EXPR. */
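          /* E.g. (illustrative): "(a & 8) == 8" folds to "(a & 8) != 0"
             and "(a & 8) != 8" folds to "(a & 8) == 0".  */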
12417 if (TREE_CODE (arg0) == BIT_AND_EXPR
12418 && integer_pow2p (TREE_OPERAND (arg0, 1))
12419 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12420 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12421 arg0, fold_convert (TREE_TYPE (arg0),
12422 integer_zero_node));
12424 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12425 bit, then fold the expression into A < 0 or A >= 0. */
12426 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12427 if (tem)
12428 return tem;
12430 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12431 Similarly for NE_EXPR. */
12432 if (TREE_CODE (arg0) == BIT_AND_EXPR
12433 && TREE_CODE (arg1) == INTEGER_CST
12434 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12436 tree notc = fold_build1 (BIT_NOT_EXPR,
12437 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12438 TREE_OPERAND (arg0, 1));
12439 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12440 arg1, notc);
12441 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12442 if (integer_nonzerop (dandnotc))
12443 return omit_one_operand (type, rslt, arg0);
12446 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12447 Similarly for NE_EXPR. */
12448 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12449 && TREE_CODE (arg1) == INTEGER_CST
12450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12452 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12453 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12454 TREE_OPERAND (arg0, 1), notd);
12455 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12456 if (integer_nonzerop (candnotd))
12457 return omit_one_operand (type, rslt, arg0);
12460 /* If this is a comparison of a field, we may be able to simplify it. */
12461 if ((TREE_CODE (arg0) == COMPONENT_REF
12462 || TREE_CODE (arg0) == BIT_FIELD_REF)
12463 /* Handle the constant case even without -O
12464 to make sure the warnings are given. */
12465 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12467 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12468 if (t1)
12469 return t1;
12472 /* Optimize comparisons of strlen vs zero to a compare of the
12473 first character of the string vs zero. To wit,
12474 strlen(ptr) == 0 => *ptr == 0
12475 strlen(ptr) != 0 => *ptr != 0
12476 Other cases should reduce to one of these two (or a constant)
12477 due to the return value of strlen being unsigned. */
12478 if (TREE_CODE (arg0) == CALL_EXPR
12479 && integer_zerop (arg1))
12481 tree fndecl = get_callee_fndecl (arg0);
12483 if (fndecl
12484 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12485 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12486 && call_expr_nargs (arg0) == 1
12487 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12489 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12490 return fold_build2 (code, type, iref,
12491 build_int_cst (TREE_TYPE (iref), 0));
12495 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12496 of X. Similarly fold (X >> C) == 0 into X >= 0. */
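          /* Illustrative example for 32-bit int: "(x >> 31) != 0" becomes
             "x < 0"; an unsigned X is first converted to the corresponding
             signed type so the sign test is meaningful.  */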
12497 if (TREE_CODE (arg0) == RSHIFT_EXPR
12498 && integer_zerop (arg1)
12499 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12501 tree arg00 = TREE_OPERAND (arg0, 0);
12502 tree arg01 = TREE_OPERAND (arg0, 1);
12503 tree itype = TREE_TYPE (arg00);
12504 if (TREE_INT_CST_HIGH (arg01) == 0
12505 && TREE_INT_CST_LOW (arg01)
12506 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12508 if (TYPE_UNSIGNED (itype))
12510 itype = signed_type_for (itype);
12511 arg00 = fold_convert (itype, arg00);
12513 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12514 type, arg00, build_int_cst (itype, 0));
12518 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12519 if (integer_zerop (arg1)
12520 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12521 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12522 TREE_OPERAND (arg0, 1));
12524 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12525 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12526 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12527 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12528 build_int_cst (TREE_TYPE (arg1), 0));
12529 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12530 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12531 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12532 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12533 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12534 build_int_cst (TREE_TYPE (arg1), 0));
12536 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12537 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12538 && TREE_CODE (arg1) == INTEGER_CST
12539 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12540 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12541 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12542 TREE_OPERAND (arg0, 1), arg1));
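          /* E.g. (illustrative): "(x ^ 5) == 12" becomes "x == (5 ^ 12)",
             i.e. "x == 9".  */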
12544 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12545 (X & C) == 0 when C is a single bit. */
12546 if (TREE_CODE (arg0) == BIT_AND_EXPR
12547 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12548 && integer_zerop (arg1)
12549 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12551 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12552 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12553 TREE_OPERAND (arg0, 1));
12554 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12555 type, tem, arg1);
12558 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12559 constant C is a power of two, i.e. a single bit. */
12560 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12561 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12562 && integer_zerop (arg1)
12563 && integer_pow2p (TREE_OPERAND (arg0, 1))
12564 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12565 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12567 tree arg00 = TREE_OPERAND (arg0, 0);
12568 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12569 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12572 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12573 when C is a power of two, i.e. a single bit. */
12574 if (TREE_CODE (arg0) == BIT_AND_EXPR
12575 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12576 && integer_zerop (arg1)
12577 && integer_pow2p (TREE_OPERAND (arg0, 1))
12578 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12579 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12581 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12582 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12583 arg000, TREE_OPERAND (arg0, 1));
12584 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12585 tem, build_int_cst (TREE_TYPE (tem), 0));
12588 if (integer_zerop (arg1)
12589 && tree_expr_nonzero_p (arg0))
12591 tree res = constant_boolean_node (code==NE_EXPR, type);
12592 return omit_one_operand (type, res, arg0);
12595 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12596 if (TREE_CODE (arg0) == NEGATE_EXPR
12597 && TREE_CODE (arg1) == NEGATE_EXPR)
12598 return fold_build2 (code, type,
12599 TREE_OPERAND (arg0, 0),
12600 TREE_OPERAND (arg1, 0));
12602 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12603 if (TREE_CODE (arg0) == BIT_AND_EXPR
12604 && TREE_CODE (arg1) == BIT_AND_EXPR)
12606 tree arg00 = TREE_OPERAND (arg0, 0);
12607 tree arg01 = TREE_OPERAND (arg0, 1);
12608 tree arg10 = TREE_OPERAND (arg1, 0);
12609 tree arg11 = TREE_OPERAND (arg1, 1);
12610 tree itype = TREE_TYPE (arg0);
12612 if (operand_equal_p (arg01, arg11, 0))
12613 return fold_build2 (code, type,
12614 fold_build2 (BIT_AND_EXPR, itype,
12615 fold_build2 (BIT_XOR_EXPR, itype,
12616 arg00, arg10),
12617 arg01),
12618 build_int_cst (itype, 0));
12620 if (operand_equal_p (arg01, arg10, 0))
12621 return fold_build2 (code, type,
12622 fold_build2 (BIT_AND_EXPR, itype,
12623 fold_build2 (BIT_XOR_EXPR, itype,
12624 arg00, arg11),
12625 arg01),
12626 build_int_cst (itype, 0));
12628 if (operand_equal_p (arg00, arg11, 0))
12629 return fold_build2 (code, type,
12630 fold_build2 (BIT_AND_EXPR, itype,
12631 fold_build2 (BIT_XOR_EXPR, itype,
12632 arg01, arg10),
12633 arg00),
12634 build_int_cst (itype, 0));
12636 if (operand_equal_p (arg00, arg10, 0))
12637 return fold_build2 (code, type,
12638 fold_build2 (BIT_AND_EXPR, itype,
12639 fold_build2 (BIT_XOR_EXPR, itype,
12640 arg01, arg11),
12641 arg00),
12642 build_int_cst (itype, 0));
12645 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12646 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12648 tree arg00 = TREE_OPERAND (arg0, 0);
12649 tree arg01 = TREE_OPERAND (arg0, 1);
12650 tree arg10 = TREE_OPERAND (arg1, 0);
12651 tree arg11 = TREE_OPERAND (arg1, 1);
12652 tree itype = TREE_TYPE (arg0);
12654 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12655 operand_equal_p guarantees no side-effects so we don't need
12656 to use omit_one_operand on Z. */
12657 if (operand_equal_p (arg01, arg11, 0))
12658 return fold_build2 (code, type, arg00, arg10);
12659 if (operand_equal_p (arg01, arg10, 0))
12660 return fold_build2 (code, type, arg00, arg11);
12661 if (operand_equal_p (arg00, arg11, 0))
12662 return fold_build2 (code, type, arg01, arg10);
12663 if (operand_equal_p (arg00, arg10, 0))
12664 return fold_build2 (code, type, arg01, arg11);
12666 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12667 if (TREE_CODE (arg01) == INTEGER_CST
12668 && TREE_CODE (arg11) == INTEGER_CST)
12669 return fold_build2 (code, type,
12670 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12671 fold_build2 (BIT_XOR_EXPR, itype,
12672 arg01, arg11)),
12673 arg10);
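          /* Illustrative example of the constant case just above:
             "(x ^ 3) == (y ^ 5)" becomes "(x ^ (3 ^ 5)) == y",
             i.e. "(x ^ 6) == y".  */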
12676 /* Attempt to simplify equality/inequality comparisons of complex
12677 values. Only lower the comparison if the result is known or
12678 can be simplified to a single scalar comparison. */
12679 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12680 || TREE_CODE (arg0) == COMPLEX_CST)
12681 && (TREE_CODE (arg1) == COMPLEX_EXPR
12682 || TREE_CODE (arg1) == COMPLEX_CST))
12684 tree real0, imag0, real1, imag1;
12685 tree rcond, icond;
12687 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12689 real0 = TREE_OPERAND (arg0, 0);
12690 imag0 = TREE_OPERAND (arg0, 1);
12692 else
12694 real0 = TREE_REALPART (arg0);
12695 imag0 = TREE_IMAGPART (arg0);
12698 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12700 real1 = TREE_OPERAND (arg1, 0);
12701 imag1 = TREE_OPERAND (arg1, 1);
12703 else
12705 real1 = TREE_REALPART (arg1);
12706 imag1 = TREE_IMAGPART (arg1);
12709 rcond = fold_binary (code, type, real0, real1);
12710 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12712 if (integer_zerop (rcond))
12714 if (code == EQ_EXPR)
12715 return omit_two_operands (type, boolean_false_node,
12716 imag0, imag1);
12717 return fold_build2 (NE_EXPR, type, imag0, imag1);
12719 else
12721 if (code == NE_EXPR)
12722 return omit_two_operands (type, boolean_true_node,
12723 imag0, imag1);
12724 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12728 icond = fold_binary (code, type, imag0, imag1);
12729 if (icond && TREE_CODE (icond) == INTEGER_CST)
12731 if (integer_zerop (icond))
12733 if (code == EQ_EXPR)
12734 return omit_two_operands (type, boolean_false_node,
12735 real0, real1);
12736 return fold_build2 (NE_EXPR, type, real0, real1);
12738 else
12740 if (code == NE_EXPR)
12741 return omit_two_operands (type, boolean_true_node,
12742 real0, real1);
12743 return fold_build2 (EQ_EXPR, type, real0, real1);
12748 return NULL_TREE;
12750 case LT_EXPR:
12751 case GT_EXPR:
12752 case LE_EXPR:
12753 case GE_EXPR:
12754 tem = fold_comparison (code, type, op0, op1);
12755 if (tem != NULL_TREE)
12756 return tem;
12758 /* Transform comparisons of the form X +- C CMP X. */
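      /* Illustrative sketch (not in the original source): when signed
         overflow is treated as undefined, "x + 1 > x" folds to true and
         "x - 1 > x" folds to false below, with fold_overflow_warning
         noting the assumption under -Wstrict-overflow.  */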
12759 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12760 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12761 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12762 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12763 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12764 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12766 tree arg01 = TREE_OPERAND (arg0, 1);
12767 enum tree_code code0 = TREE_CODE (arg0);
12768 int is_positive;
12770 if (TREE_CODE (arg01) == REAL_CST)
12771 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12772 else
12773 is_positive = tree_int_cst_sgn (arg01);
12775 /* (X - c) > X becomes false. */
12776 if (code == GT_EXPR
12777 && ((code0 == MINUS_EXPR && is_positive >= 0)
12778 || (code0 == PLUS_EXPR && is_positive <= 0)))
12780 if (TREE_CODE (arg01) == INTEGER_CST
12781 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12782 fold_overflow_warning (("assuming signed overflow does not "
12783 "occur when assuming that (X - c) > X "
12784 "is always false"),
12785 WARN_STRICT_OVERFLOW_ALL);
12786 return constant_boolean_node (0, type);
12789 /* Likewise (X + c) < X becomes false. */
12790 if (code == LT_EXPR
12791 && ((code0 == PLUS_EXPR && is_positive >= 0)
12792 || (code0 == MINUS_EXPR && is_positive <= 0)))
12794 if (TREE_CODE (arg01) == INTEGER_CST
12795 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12796 fold_overflow_warning (("assuming signed overflow does not "
12797 "occur when assuming that "
12798 "(X + c) < X is always false"),
12799 WARN_STRICT_OVERFLOW_ALL);
12800 return constant_boolean_node (0, type);
12803 /* Convert (X - c) <= X to true. */
12804 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12805 && code == LE_EXPR
12806 && ((code0 == MINUS_EXPR && is_positive >= 0)
12807 || (code0 == PLUS_EXPR && is_positive <= 0)))
12809 if (TREE_CODE (arg01) == INTEGER_CST
12810 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12811 fold_overflow_warning (("assuming signed overflow does not "
12812 "occur when assuming that "
12813 "(X - c) <= X is always true"),
12814 WARN_STRICT_OVERFLOW_ALL);
12815 return constant_boolean_node (1, type);
12818 /* Convert (X + c) >= X to true. */
12819 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12820 && code == GE_EXPR
12821 && ((code0 == PLUS_EXPR && is_positive >= 0)
12822 || (code0 == MINUS_EXPR && is_positive <= 0)))
12824 if (TREE_CODE (arg01) == INTEGER_CST
12825 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12826 fold_overflow_warning (("assuming signed overflow does not "
12827 "occur when assuming that "
12828 "(X + c) >= X is always true"),
12829 WARN_STRICT_OVERFLOW_ALL);
12830 return constant_boolean_node (1, type);
12833 if (TREE_CODE (arg01) == INTEGER_CST)
12835 /* Convert X + c > X and X - c < X to true for integers. */
12836 if (code == GT_EXPR
12837 && ((code0 == PLUS_EXPR && is_positive > 0)
12838 || (code0 == MINUS_EXPR && is_positive < 0)))
12840 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12841 fold_overflow_warning (("assuming signed overflow does "
12842 "not occur when assuming that "
12843 "(X + c) > X is always true"),
12844 WARN_STRICT_OVERFLOW_ALL);
12845 return constant_boolean_node (1, type);
12848 if (code == LT_EXPR
12849 && ((code0 == MINUS_EXPR && is_positive > 0)
12850 || (code0 == PLUS_EXPR && is_positive < 0)))
12852 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12853 fold_overflow_warning (("assuming signed overflow does "
12854 "not occur when assuming that "
12855 "(X - c) < X is always true"),
12856 WARN_STRICT_OVERFLOW_ALL);
12857 return constant_boolean_node (1, type);
12860 /* Convert X + c <= X and X - c >= X to false for integers. */
12861 if (code == LE_EXPR
12862 && ((code0 == PLUS_EXPR && is_positive > 0)
12863 || (code0 == MINUS_EXPR && is_positive < 0)))
12865 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12866 fold_overflow_warning (("assuming signed overflow does "
12867 "not occur when assuming that "
12868 "(X + c) <= X is always false"),
12869 WARN_STRICT_OVERFLOW_ALL);
12870 return constant_boolean_node (0, type);
12873 if (code == GE_EXPR
12874 && ((code0 == MINUS_EXPR && is_positive > 0)
12875 || (code0 == PLUS_EXPR && is_positive < 0)))
12877 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12878 fold_overflow_warning (("assuming signed overflow does "
12879 "not occur when assuming that "
12880 "(X - c) >= X is always false"),
12881 WARN_STRICT_OVERFLOW_ALL);
12882 return constant_boolean_node (0, type);
12887 /* Comparisons with the highest or lowest possible integer of
12888 the specified precision will have known values. */
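      /* Examples (illustrative): for 32-bit int x, "x > INT_MAX" folds to
         false, "x >= INT_MAX" to "x == INT_MAX", "x < INT_MIN" to false,
         and "x <= INT_MIN" to "x == INT_MIN".  */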
12890 tree arg1_type = TREE_TYPE (arg1);
12891 unsigned int width = TYPE_PRECISION (arg1_type);
12893 if (TREE_CODE (arg1) == INTEGER_CST
12894 && width <= 2 * HOST_BITS_PER_WIDE_INT
12895 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12897 HOST_WIDE_INT signed_max_hi;
12898 unsigned HOST_WIDE_INT signed_max_lo;
12899 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12901 if (width <= HOST_BITS_PER_WIDE_INT)
12903 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12904 - 1;
12905 signed_max_hi = 0;
12906 max_hi = 0;
12908 if (TYPE_UNSIGNED (arg1_type))
12910 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12911 min_lo = 0;
12912 min_hi = 0;
12914 else
12916 max_lo = signed_max_lo;
12917 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12918 min_hi = -1;
12921 else
12923 width -= HOST_BITS_PER_WIDE_INT;
12924 signed_max_lo = -1;
12925 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12926 - 1;
12927 max_lo = -1;
12928 min_lo = 0;
12930 if (TYPE_UNSIGNED (arg1_type))
12932 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12933 min_hi = 0;
12935 else
12937 max_hi = signed_max_hi;
12938 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12942 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12943 && TREE_INT_CST_LOW (arg1) == max_lo)
12944 switch (code)
12946 case GT_EXPR:
12947 return omit_one_operand (type, integer_zero_node, arg0);
12949 case GE_EXPR:
12950 return fold_build2 (EQ_EXPR, type, op0, op1);
12952 case LE_EXPR:
12953 return omit_one_operand (type, integer_one_node, arg0);
12955 case LT_EXPR:
12956 return fold_build2 (NE_EXPR, type, op0, op1);
12958 /* The GE_EXPR and LT_EXPR cases above are not normally
12959 reached because of previous transformations. */
12961 default:
12962 break;
12964 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12965 == max_hi
12966 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12967 switch (code)
12969 case GT_EXPR:
12970 arg1 = const_binop (PLUS_EXPR, arg1,
12971 build_int_cst (TREE_TYPE (arg1), 1), 0);
12972 return fold_build2 (EQ_EXPR, type,
12973 fold_convert (TREE_TYPE (arg1), arg0),
12974 arg1);
12975 case LE_EXPR:
12976 arg1 = const_binop (PLUS_EXPR, arg1,
12977 build_int_cst (TREE_TYPE (arg1), 1), 0);
12978 return fold_build2 (NE_EXPR, type,
12979 fold_convert (TREE_TYPE (arg1), arg0),
12980 arg1);
12981 default:
12982 break;
12984 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12985 == min_hi
12986 && TREE_INT_CST_LOW (arg1) == min_lo)
12987 switch (code)
12989 case LT_EXPR:
12990 return omit_one_operand (type, integer_zero_node, arg0);
12992 case LE_EXPR:
12993 return fold_build2 (EQ_EXPR, type, op0, op1);
12995 case GE_EXPR:
12996 return omit_one_operand (type, integer_one_node, arg0);
12998 case GT_EXPR:
12999 return fold_build2 (NE_EXPR, type, op0, op1);
13001 default:
13002 break;
13004 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13005 == min_hi
13006 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13007 switch (code)
13009 case GE_EXPR:
13010 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13011 return fold_build2 (NE_EXPR, type,
13012 fold_convert (TREE_TYPE (arg1), arg0),
13013 arg1);
13014 case LT_EXPR:
13015 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13016 return fold_build2 (EQ_EXPR, type,
13017 fold_convert (TREE_TYPE (arg1), arg0),
13018 arg1);
13019 default:
13020 break;
13023 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13024 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13025 && TYPE_UNSIGNED (arg1_type)
13026 /* We will flip the signedness of the comparison operator
13027 associated with the mode of arg1, so the sign bit is
13028 specified by this mode. Check that arg1 is the signed
13029 max associated with this sign bit. */
13030 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13031 /* signed_type does not work on pointer types. */
13032 && INTEGRAL_TYPE_P (arg1_type))
13034 /* The following case also applies to X < signed_max+1
13035 and X >= signed_max+1 because of previous transformations. */
13036 if (code == LE_EXPR || code == GT_EXPR)
13038 tree st;
13039 st = signed_type_for (TREE_TYPE (arg1));
13040 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13041 type, fold_convert (st, arg0),
13042 build_int_cst (st, 0));
13048 /* If we are comparing an ABS_EXPR with a constant, we can
13049 convert all the cases into explicit comparisons, but they may
13050 well not be faster than doing the ABS and one comparison.
13051 But ABS (X) <= C is a range comparison, which becomes a subtraction
13052 and a comparison, and is probably faster. */
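      /* Sketch: "abs (x) <= 5" becomes "x >= -5 && x <= 5", built as the
         TRUTH_ANDIF_EXPR just below.  */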
13053 if (code == LE_EXPR
13054 && TREE_CODE (arg1) == INTEGER_CST
13055 && TREE_CODE (arg0) == ABS_EXPR
13056 && ! TREE_SIDE_EFFECTS (arg0)
13057 && (0 != (tem = negate_expr (arg1)))
13058 && TREE_CODE (tem) == INTEGER_CST
13059 && !TREE_OVERFLOW (tem))
13060 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13061 build2 (GE_EXPR, type,
13062 TREE_OPERAND (arg0, 0), tem),
13063 build2 (LE_EXPR, type,
13064 TREE_OPERAND (arg0, 0), arg1));
13066 /* Convert ABS_EXPR<x> >= 0 to true. */
13067 strict_overflow_p = false;
13068 if (code == GE_EXPR
13069 && (integer_zerop (arg1)
13070 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13071 && real_zerop (arg1)))
13072 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13074 if (strict_overflow_p)
13075 fold_overflow_warning (("assuming signed overflow does not occur "
13076 "when simplifying comparison of "
13077 "absolute value and zero"),
13078 WARN_STRICT_OVERFLOW_CONDITIONAL);
13079 return omit_one_operand (type, integer_one_node, arg0);
13082 /* Convert ABS_EXPR<x> < 0 to false. */
13083 strict_overflow_p = false;
13084 if (code == LT_EXPR
13085 && (integer_zerop (arg1) || real_zerop (arg1))
13086 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13088 if (strict_overflow_p)
13089 fold_overflow_warning (("assuming signed overflow does not occur "
13090 "when simplifying comparison of "
13091 "absolute value and zero"),
13092 WARN_STRICT_OVERFLOW_CONDITIONAL);
13093 return omit_one_operand (type, integer_zero_node, arg0);
13096 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13097 and similarly for >= into !=. */
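      /* E.g. (illustrative): for unsigned x, "x < (1 << y)" becomes
         "(x >> y) == 0" and "x >= (1 << y)" becomes "(x >> y) != 0".  */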
13098 if ((code == LT_EXPR || code == GE_EXPR)
13099 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13100 && TREE_CODE (arg1) == LSHIFT_EXPR
13101 && integer_onep (TREE_OPERAND (arg1, 0)))
13102 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13103 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13104 TREE_OPERAND (arg1, 1)),
13105 build_int_cst (TREE_TYPE (arg0), 0));
13107 if ((code == LT_EXPR || code == GE_EXPR)
13108 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13109 && CONVERT_EXPR_P (arg1)
13110 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13111 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13112 return
13113 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13114 fold_convert (TREE_TYPE (arg0),
13115 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13116 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13117 1))),
13118 build_int_cst (TREE_TYPE (arg0), 0));
13120 return NULL_TREE;
13122 case UNORDERED_EXPR:
13123 case ORDERED_EXPR:
13124 case UNLT_EXPR:
13125 case UNLE_EXPR:
13126 case UNGT_EXPR:
13127 case UNGE_EXPR:
13128 case UNEQ_EXPR:
13129 case LTGT_EXPR:
13130 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13132 t1 = fold_relational_const (code, type, arg0, arg1);
13133 if (t1 != NULL_TREE)
13134 return t1;
13137 /* If the first operand is NaN, the result is constant. */
13138 if (TREE_CODE (arg0) == REAL_CST
13139 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13140 && (code != LTGT_EXPR || ! flag_trapping_math))
13142 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13143 ? integer_zero_node
13144 : integer_one_node;
13145 return omit_one_operand (type, t1, arg1);
13148 /* If the second operand is NaN, the result is constant. */
13149 if (TREE_CODE (arg1) == REAL_CST
13150 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13151 && (code != LTGT_EXPR || ! flag_trapping_math))
13153 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13154 ? integer_zero_node
13155 : integer_one_node;
13156 return omit_one_operand (type, t1, arg0);
13159 /* Simplify unordered comparison of something with itself. */
13160 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13161 && operand_equal_p (arg0, arg1, 0))
13162 return constant_boolean_node (1, type);
13164 if (code == LTGT_EXPR
13165 && !flag_trapping_math
13166 && operand_equal_p (arg0, arg1, 0))
13167 return constant_boolean_node (0, type);
13169 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13171 tree targ0 = strip_float_extensions (arg0);
13172 tree targ1 = strip_float_extensions (arg1);
13173 tree newtype = TREE_TYPE (targ0);
13175 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13176 newtype = TREE_TYPE (targ1);
13178 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13179 return fold_build2 (code, type, fold_convert (newtype, targ0),
13180 fold_convert (newtype, targ1));
13183 return NULL_TREE;
13185 case COMPOUND_EXPR:
13186 /* When pedantic, a compound expression can be neither an lvalue
13187 nor an integer constant expression. */
13188 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13189 return NULL_TREE;
13190 /* Don't let (0, 0) be a null pointer constant. */
13191 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13192 : fold_convert (type, arg1);
13193 return pedantic_non_lvalue (tem);
13195 case COMPLEX_EXPR:
13196 if ((TREE_CODE (arg0) == REAL_CST
13197 && TREE_CODE (arg1) == REAL_CST)
13198 || (TREE_CODE (arg0) == INTEGER_CST
13199 && TREE_CODE (arg1) == INTEGER_CST))
13200 return build_complex (type, arg0, arg1);
13201 return NULL_TREE;
13203 case ASSERT_EXPR:
13204 /* An ASSERT_EXPR should never be passed to fold_binary. */
13205 gcc_unreachable ();
13207 default:
13208 return NULL_TREE;
13209 } /* switch (code) */
13212 /* Callback for walk_tree, looking for LABEL_EXPR.
13213 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
13214 Do not check the sub-tree of GOTO_EXPR. */
13216 static tree
13217 contains_label_1 (tree *tp,
13218 int *walk_subtrees,
13219 void *data ATTRIBUTE_UNUSED)
13221 switch (TREE_CODE (*tp))
13223 case LABEL_EXPR:
13224 return *tp;
13225 case GOTO_EXPR:
13226 *walk_subtrees = 0;
13227 /* no break */
13228 default:
13229 return NULL_TREE;
13233 /* Checks whether the sub-tree ST contains a label which is accessible
13234 from outside the sub-tree. Returns true if such a label is found,
13235 false otherwise. */
13237 static bool
13238 contains_label_p (tree st)
13240 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13243 /* Fold a ternary expression of code CODE and type TYPE with operands
13244 OP0, OP1, and OP2. Return the folded expression if folding is
13245 successful. Otherwise, return NULL_TREE. */
13247 tree
13248 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13250 tree tem;
13251 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13252 enum tree_code_class kind = TREE_CODE_CLASS (code);
13254 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13255 && TREE_CODE_LENGTH (code) == 3);
13257 /* Strip any conversions that don't change the mode. This is safe
13258 for every expression, except for a comparison expression because
13259 its signedness is derived from its operands. So, in the latter
13260 case, only strip conversions that don't change the signedness.
13262 Note that this is done as an internal manipulation within the
13263 constant folder, in order to find the simplest representation of
13264 the arguments so that their form can be studied. In any cases,
13265 the appropriate type conversions should be put back in the tree
13266 that will get out of the constant folder. */
13267 if (op0)
13269 arg0 = op0;
13270 STRIP_NOPS (arg0);
13273 if (op1)
13275 arg1 = op1;
13276 STRIP_NOPS (arg1);
13279 switch (code)
13281 case COMPONENT_REF:
13282 if (TREE_CODE (arg0) == CONSTRUCTOR
13283 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13285 unsigned HOST_WIDE_INT idx;
13286 tree field, value;
13287 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13288 if (field == arg1)
13289 return value;
13291 return NULL_TREE;
13293 case COND_EXPR:
13294 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13295 so all simple results must be passed through pedantic_non_lvalue. */
13296 if (TREE_CODE (arg0) == INTEGER_CST)
13298 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13299 tem = integer_zerop (arg0) ? op2 : op1;
13300 /* Only optimize constant conditions when the selected branch
13301 has the same type as the COND_EXPR. This avoids optimizing
13302 away "c ? x : throw", where the throw has a void type.
13303 Avoid throwing away the operand that contains a label. */
13304 if ((!TREE_SIDE_EFFECTS (unused_op)
13305 || !contains_label_p (unused_op))
13306 && (! VOID_TYPE_P (TREE_TYPE (tem))
13307 || VOID_TYPE_P (type)))
13308 return pedantic_non_lvalue (tem);
13309 return NULL_TREE;
13311 if (operand_equal_p (arg1, op2, 0))
13312 return pedantic_omit_one_operand (type, arg1, arg0);
13314 /* If we have A op B ? A : C, we may be able to convert this to a
13315 simpler expression, depending on the operation and the values
13316 of B and C. Signed zeros prevent all of these transformations,
13317 for reasons given above each one.
13319 Also try swapping the arguments and inverting the conditional. */
13320 if (COMPARISON_CLASS_P (arg0)
13321 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13322 arg1, TREE_OPERAND (arg0, 1))
13323 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13325 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13326 if (tem)
13327 return tem;
13330 if (COMPARISON_CLASS_P (arg0)
13331 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13332 op2,
13333 TREE_OPERAND (arg0, 1))
13334 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13336 tem = fold_truth_not_expr (arg0);
13337 if (tem && COMPARISON_CLASS_P (tem))
13339 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13340 if (tem)
13341 return tem;
13345 /* If the second operand is simpler than the third, swap them
13346 since that produces better jump optimization results. */
13347 if (truth_value_p (TREE_CODE (arg0))
13348 && tree_swap_operands_p (op1, op2, false))
13350 /* See if this can be inverted. If it can't, possibly because
13351 it was a floating-point inequality comparison, don't do
13352 anything. */
13353 tem = fold_truth_not_expr (arg0);
13354 if (tem)
13355 return fold_build3 (code, type, tem, op2, op1);
13358 /* Convert A ? 1 : 0 to simply A. */
13359 if (integer_onep (op1)
13360 && integer_zerop (op2)
13361 /* If we try to convert OP0 to our type, the
13362 call to fold will try to move the conversion inside
13363 a COND, which will recurse. In that case, the COND_EXPR
13364 is probably the best choice, so leave it alone. */
13365 && type == TREE_TYPE (arg0))
13366 return pedantic_non_lvalue (arg0);
13368 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13369 over COND_EXPR in cases such as floating point comparisons. */
13370 if (integer_zerop (op1)
13371 && integer_onep (op2)
13372 && truth_value_p (TREE_CODE (arg0)))
13373 return pedantic_non_lvalue (fold_convert (type,
13374 invert_truthvalue (arg0)));
13376 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
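      /* Illustrative example: for int a, "a < 0 ? INT_MIN : 0" is simply
         "a & INT_MIN".  */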
13377 if (TREE_CODE (arg0) == LT_EXPR
13378 && integer_zerop (TREE_OPERAND (arg0, 1))
13379 && integer_zerop (op2)
13380 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13382 /* sign_bit_p only checks ARG1 bits within A's precision.
13383 If <sign bit of A> has wider type than A, bits outside
13384 of A's precision in <sign bit of A> need to be checked.
13385 If they are all 0, this optimization must be done in
13386 A's unsigned type; if they are all 1, in A's signed type;
13387 otherwise the transformation can't be done. */
13388 if (TYPE_PRECISION (TREE_TYPE (tem))
13389 < TYPE_PRECISION (TREE_TYPE (arg1))
13390 && TYPE_PRECISION (TREE_TYPE (tem))
13391 < TYPE_PRECISION (type))
13393 unsigned HOST_WIDE_INT mask_lo;
13394 HOST_WIDE_INT mask_hi;
13395 int inner_width, outer_width;
13396 tree tem_type;
13398 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13399 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13400 if (outer_width > TYPE_PRECISION (type))
13401 outer_width = TYPE_PRECISION (type);
13403 if (outer_width > HOST_BITS_PER_WIDE_INT)
13405 mask_hi = ((unsigned HOST_WIDE_INT) -1
13406 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13407 mask_lo = -1;
13409 else
13411 mask_hi = 0;
13412 mask_lo = ((unsigned HOST_WIDE_INT) -1
13413 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13415 if (inner_width > HOST_BITS_PER_WIDE_INT)
13417 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13418 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13419 mask_lo = 0;
13421 else
13422 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13423 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13425 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13426 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13428 tem_type = signed_type_for (TREE_TYPE (tem));
13429 tem = fold_convert (tem_type, tem);
13431 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13432 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13434 tem_type = unsigned_type_for (TREE_TYPE (tem));
13435 tem = fold_convert (tem_type, tem);
13437 else
13438 tem = NULL;
13441 if (tem)
13442 return fold_convert (type,
13443 fold_build2 (BIT_AND_EXPR,
13444 TREE_TYPE (tem), tem,
13445 fold_convert (TREE_TYPE (tem),
13446 arg1)));
13449 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13450 already handled above. */
13451 if (TREE_CODE (arg0) == BIT_AND_EXPR
13452 && integer_onep (TREE_OPERAND (arg0, 1))
13453 && integer_zerop (op2)
13454 && integer_pow2p (arg1))
13456 tree tem = TREE_OPERAND (arg0, 0);
13457 STRIP_NOPS (tem);
13458 if (TREE_CODE (tem) == RSHIFT_EXPR
13459 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13460 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13461 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13462 return fold_build2 (BIT_AND_EXPR, type,
13463 TREE_OPERAND (tem, 0), arg1);
13466 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13467 is probably obsolete because the first operand should be a
13468 truth value (that's why we have the two cases above), but let's
13469 leave it in until we can confirm this for all front-ends. */
13470 if (integer_zerop (op2)
13471 && TREE_CODE (arg0) == NE_EXPR
13472 && integer_zerop (TREE_OPERAND (arg0, 1))
13473 && integer_pow2p (arg1)
13474 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13475 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13476 arg1, OEP_ONLY_CONST))
13477 return pedantic_non_lvalue (fold_convert (type,
13478 TREE_OPERAND (arg0, 0)));
13480 /* Convert A ? B : 0 into A && B if A and B are truth values. */
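      /* E.g. (illustrative): for truth values a and b, "a ? b : 0"
         becomes "a && b"; the dual cases below give "a ? b : 1" =>
         "!a || b", "a ? 0 : b" => "!a && b" and "a ? 1 : b" =>
         "a || b".  */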
13481 if (integer_zerop (op2)
13482 && truth_value_p (TREE_CODE (arg0))
13483 && truth_value_p (TREE_CODE (arg1)))
13484 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13485 fold_convert (type, arg0),
13486 arg1);
13488 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13489 if (integer_onep (op2)
13490 && truth_value_p (TREE_CODE (arg0))
13491 && truth_value_p (TREE_CODE (arg1)))
13493 /* Only perform transformation if ARG0 is easily inverted. */
13494 tem = fold_truth_not_expr (arg0);
13495 if (tem)
13496 return fold_build2 (TRUTH_ORIF_EXPR, type,
13497 fold_convert (type, tem),
13498 arg1);
13501 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13502 if (integer_zerop (arg1)
13503 && truth_value_p (TREE_CODE (arg0))
13504 && truth_value_p (TREE_CODE (op2)))
13506 /* Only perform transformation if ARG0 is easily inverted. */
13507 tem = fold_truth_not_expr (arg0);
13508 if (tem)
13509 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13510 fold_convert (type, tem),
13511 op2);
13514 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13515 if (integer_onep (arg1)
13516 && truth_value_p (TREE_CODE (arg0))
13517 && truth_value_p (TREE_CODE (op2)))
13518 return fold_build2 (TRUTH_ORIF_EXPR, type,
13519 fold_convert (type, arg0),
13520 op2);
13522 return NULL_TREE;
13524 case CALL_EXPR:
13525 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13526 of fold_ternary on them. */
13527 gcc_unreachable ();
13529 case BIT_FIELD_REF:
13530 if ((TREE_CODE (arg0) == VECTOR_CST
13531 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13532 && type == TREE_TYPE (TREE_TYPE (arg0)))
13534 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13535 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13537 if (width != 0
13538 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13539 && (idx % width) == 0
13540 && (idx = idx / width)
13541 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13543 tree elements = NULL_TREE;
13545 if (TREE_CODE (arg0) == VECTOR_CST)
13546 elements = TREE_VECTOR_CST_ELTS (arg0);
13547 else
13549 unsigned HOST_WIDE_INT idx;
13550 tree value;
13552 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13553 elements = tree_cons (NULL_TREE, value, elements);
13555 while (idx-- > 0 && elements)
13556 elements = TREE_CHAIN (elements);
13557 if (elements)
13558 return TREE_VALUE (elements);
13559 else
13560 return fold_convert (type, integer_zero_node);
13564 /* A bit-field-ref that referenced the full argument can be stripped. */
13565 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13566 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13567 && integer_zerop (op2))
13568 return fold_convert (type, arg0);
13570 return NULL_TREE;
13572 default:
13573 return NULL_TREE;
13574 } /* switch (code) */
13577 /* Perform constant folding and related simplification of EXPR.
13578 The related simplifications include x*1 => x, x*0 => 0, etc.,
13579 and application of the associative law.
13580 NOP_EXPR conversions may be removed freely (as long as we
13581 are careful not to change the type of the overall expression).
13582 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13583 but we can constant-fold them if they have constant operands. */
13585 #ifdef ENABLE_FOLD_CHECKING
13586 # define fold(x) fold_1 (x)
13587 static tree fold_1 (tree);
13588 static
13589 #endif
13590 tree
13591 fold (tree expr)
13593 const tree t = expr;
13594 enum tree_code code = TREE_CODE (t);
13595 enum tree_code_class kind = TREE_CODE_CLASS (code);
13596 tree tem;
13598 /* Return right away if a constant. */
13599 if (kind == tcc_constant)
13600 return t;
13602 /* CALL_EXPR-like objects with variable numbers of operands are
13603 treated specially. */
13604 if (kind == tcc_vl_exp)
13606 if (code == CALL_EXPR)
13608 tem = fold_call_expr (expr, false);
13609 return tem ? tem : expr;
13611 return expr;
13614 if (IS_EXPR_CODE_CLASS (kind))
13616 tree type = TREE_TYPE (t);
13617 tree op0, op1, op2;
13619 switch (TREE_CODE_LENGTH (code))
13621 case 1:
13622 op0 = TREE_OPERAND (t, 0);
13623 tem = fold_unary (code, type, op0);
13624 return tem ? tem : expr;
13625 case 2:
13626 op0 = TREE_OPERAND (t, 0);
13627 op1 = TREE_OPERAND (t, 1);
13628 tem = fold_binary (code, type, op0, op1);
13629 return tem ? tem : expr;
13630 case 3:
13631 op0 = TREE_OPERAND (t, 0);
13632 op1 = TREE_OPERAND (t, 1);
13633 op2 = TREE_OPERAND (t, 2);
13634 tem = fold_ternary (code, type, op0, op1, op2);
13635 return tem ? tem : expr;
13636 default:
13637 break;
13641 switch (code)
13643 case ARRAY_REF:
13645 tree op0 = TREE_OPERAND (t, 0);
13646 tree op1 = TREE_OPERAND (t, 1);
13648 if (TREE_CODE (op1) == INTEGER_CST
13649 && TREE_CODE (op0) == CONSTRUCTOR
13650 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13652 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13653 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13654 unsigned HOST_WIDE_INT begin = 0;
13656 /* Find a matching index by means of a binary search. */
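              /* Illustrative sketch: for a CONSTRUCTOR such as
                 "{10, 20, 30}", a constant reference like "t[1]"
                 (hypothetical array t) is looked up here and folds to 20;
                 RANGE_EXPR indexes cover designators of the form
                 "[2 ... 7]".  */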
13657 while (begin != end)
13659 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13660 tree index = VEC_index (constructor_elt, elts, middle)->index;
13662 if (TREE_CODE (index) == INTEGER_CST
13663 && tree_int_cst_lt (index, op1))
13664 begin = middle + 1;
13665 else if (TREE_CODE (index) == INTEGER_CST
13666 && tree_int_cst_lt (op1, index))
13667 end = middle;
13668 else if (TREE_CODE (index) == RANGE_EXPR
13669 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13670 begin = middle + 1;
13671 else if (TREE_CODE (index) == RANGE_EXPR
13672 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13673 end = middle;
13674 else
13675 return VEC_index (constructor_elt, elts, middle)->value;
13679 return t;
13682 case CONST_DECL:
13683 return fold (DECL_INITIAL (t));
13685 default:
13686 return t;
13687 } /* switch (code) */
13690 #ifdef ENABLE_FOLD_CHECKING
13691 #undef fold
13693 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13694 static void fold_check_failed (const_tree, const_tree);
13695 void print_fold_checksum (const_tree);
13697 /* When --enable-checking=fold, compute a digest of expr before
13698 and after the actual fold call to verify that fold did not
13699 accidentally change the original expr. */
13701 tree
13702 fold (tree expr)
13704 tree ret;
13705 struct md5_ctx ctx;
13706 unsigned char checksum_before[16], checksum_after[16];
13707 htab_t ht;
13709 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13710 md5_init_ctx (&ctx);
13711 fold_checksum_tree (expr, &ctx, ht);
13712 md5_finish_ctx (&ctx, checksum_before);
13713 htab_empty (ht);
13715 ret = fold_1 (expr);
13717 md5_init_ctx (&ctx);
13718 fold_checksum_tree (expr, &ctx, ht);
13719 md5_finish_ctx (&ctx, checksum_after);
13720 htab_delete (ht);
13722 if (memcmp (checksum_before, checksum_after, 16))
13723 fold_check_failed (expr, ret);
13725 return ret;
13728 void
13729 print_fold_checksum (const_tree expr)
13731 struct md5_ctx ctx;
13732 unsigned char checksum[16], cnt;
13733 htab_t ht;
13735 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13736 md5_init_ctx (&ctx);
13737 fold_checksum_tree (expr, &ctx, ht);
13738 md5_finish_ctx (&ctx, checksum);
13739 htab_delete (ht);
13740 for (cnt = 0; cnt < 16; ++cnt)
13741 fprintf (stderr, "%02x", checksum[cnt]);
13742 putc ('\n', stderr);
13745 static void
13746 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13748 internal_error ("fold check: original tree changed by fold");
13751 static void
13752 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13754 const void **slot;
13755 enum tree_code code;
13756 union tree_node buf;
13757 int i, len;
13759 recursive_label:
13761 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13762 <= sizeof (struct tree_function_decl))
13763 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13764 if (expr == NULL)
13765 return;
13766 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13767 if (*slot != NULL)
13768 return;
13769 *slot = expr;
13770 code = TREE_CODE (expr);
13771 if (TREE_CODE_CLASS (code) == tcc_declaration
13772 && DECL_ASSEMBLER_NAME_SET_P (expr))
13774 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13775 memcpy ((char *) &buf, expr, tree_size (expr));
13776 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13777 expr = (tree) &buf;
13779 else if (TREE_CODE_CLASS (code) == tcc_type
13780 && (TYPE_POINTER_TO (expr)
13781 || TYPE_REFERENCE_TO (expr)
13782 || TYPE_CACHED_VALUES_P (expr)
13783 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13784 || TYPE_NEXT_VARIANT (expr)))
13786 /* Allow these fields to be modified. */
13787 tree tmp;
13788 memcpy ((char *) &buf, expr, tree_size (expr));
13789 expr = tmp = (tree) &buf;
13790 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13791 TYPE_POINTER_TO (tmp) = NULL;
13792 TYPE_REFERENCE_TO (tmp) = NULL;
13793 TYPE_NEXT_VARIANT (tmp) = NULL;
13794 if (TYPE_CACHED_VALUES_P (tmp))
13796 TYPE_CACHED_VALUES_P (tmp) = 0;
13797 TYPE_CACHED_VALUES (tmp) = NULL;
13800 md5_process_bytes (expr, tree_size (expr), ctx);
13801 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13802 if (TREE_CODE_CLASS (code) != tcc_type
13803 && TREE_CODE_CLASS (code) != tcc_declaration
13804 && code != TREE_LIST
13805 && code != SSA_NAME)
13806 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13807 switch (TREE_CODE_CLASS (code))
13809 case tcc_constant:
13810 switch (code)
13812 case STRING_CST:
13813 md5_process_bytes (TREE_STRING_POINTER (expr),
13814 TREE_STRING_LENGTH (expr), ctx);
13815 break;
13816 case COMPLEX_CST:
13817 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13818 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13819 break;
13820 case VECTOR_CST:
13821 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13822 break;
13823 default:
13824 break;
13826 break;
13827 case tcc_exceptional:
13828 switch (code)
13830 case TREE_LIST:
13831 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13832 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13833 expr = TREE_CHAIN (expr);
13834 goto recursive_label;
13835 break;
13836 case TREE_VEC:
13837 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13838 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13839 break;
13840 default:
13841 break;
13843 break;
13844 case tcc_expression:
13845 case tcc_reference:
13846 case tcc_comparison:
13847 case tcc_unary:
13848 case tcc_binary:
13849 case tcc_statement:
13850 case tcc_vl_exp:
13851 len = TREE_OPERAND_LENGTH (expr);
13852 for (i = 0; i < len; ++i)
13853 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13854 break;
13855 case tcc_declaration:
13856 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13857 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13858 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13860 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13861 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13862 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13863 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13864 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13866 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13867 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13869 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13871 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13872 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13873 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13875 break;
13876 case tcc_type:
13877 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13878 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13879 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13880 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13881 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13882 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13883 if (INTEGRAL_TYPE_P (expr)
13884 || SCALAR_FLOAT_TYPE_P (expr))
13886 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13887 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13889 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13890 if (TREE_CODE (expr) == RECORD_TYPE
13891 || TREE_CODE (expr) == UNION_TYPE
13892 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13893 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13894 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13895 break;
13896 default:
13897 break;
13901 /* Helper function for outputting the checksum of a tree T. When
13902 debugging with gdb, you can "define mynext" to be "next" followed
13903 by "call debug_fold_checksum (op0)", then just trace down till the
13904 outputs differ. */
13906 void
13907 debug_fold_checksum (const_tree t)
13909 int i;
13910 unsigned char checksum[16];
13911 struct md5_ctx ctx;
13912 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13914 md5_init_ctx (&ctx);
13915 fold_checksum_tree (t, &ctx, ht);
13916 md5_finish_ctx (&ctx, checksum);
13917 htab_empty (ht);
13919 for (i = 0; i < 16; i++)
13920 fprintf (stderr, "%d ", checksum[i]);
13922 fprintf (stderr, "\n");
13925 #endif
13927 /* Fold a unary tree expression with code CODE of type TYPE with an
13928 operand OP0. Return a folded expression if successful. Otherwise,
13929 return a tree expression with code CODE of type TYPE with an
13930 operand OP0. */
13932 tree
13933 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13935 tree tem;
13936 #ifdef ENABLE_FOLD_CHECKING
13937 unsigned char checksum_before[16], checksum_after[16];
13938 struct md5_ctx ctx;
13939 htab_t ht;
13941 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13942 md5_init_ctx (&ctx);
13943 fold_checksum_tree (op0, &ctx, ht);
13944 md5_finish_ctx (&ctx, checksum_before);
13945 htab_empty (ht);
13946 #endif
13948 tem = fold_unary (code, type, op0);
13949 if (!tem)
13950 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13952 #ifdef ENABLE_FOLD_CHECKING
13953 md5_init_ctx (&ctx);
13954 fold_checksum_tree (op0, &ctx, ht);
13955 md5_finish_ctx (&ctx, checksum_after);
13956 htab_delete (ht);
13958 if (memcmp (checksum_before, checksum_after, 16))
13959 fold_check_failed (op0, tem);
13960 #endif
13961 return tem;
13964 /* Fold a binary tree expression with code CODE of type TYPE with
13965 operands OP0 and OP1. Return a folded expression if successful.
13966 Otherwise, return a tree expression with code CODE of type TYPE
13967 with operands OP0 and OP1. */
13969 tree
13970 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13971 MEM_STAT_DECL)
13973 tree tem;
13974 #ifdef ENABLE_FOLD_CHECKING
13975 unsigned char checksum_before_op0[16],
13976 checksum_before_op1[16],
13977 checksum_after_op0[16],
13978 checksum_after_op1[16];
13979 struct md5_ctx ctx;
13980 htab_t ht;
13982 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13983 md5_init_ctx (&ctx);
13984 fold_checksum_tree (op0, &ctx, ht);
13985 md5_finish_ctx (&ctx, checksum_before_op0);
13986 htab_empty (ht);
13988 md5_init_ctx (&ctx);
13989 fold_checksum_tree (op1, &ctx, ht);
13990 md5_finish_ctx (&ctx, checksum_before_op1);
13991 htab_empty (ht);
13992 #endif
13994 tem = fold_binary (code, type, op0, op1);
13995 if (!tem)
13996 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13998 #ifdef ENABLE_FOLD_CHECKING
13999 md5_init_ctx (&ctx);
14000 fold_checksum_tree (op0, &ctx, ht);
14001 md5_finish_ctx (&ctx, checksum_after_op0);
14002 htab_empty (ht);
14004 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14005 fold_check_failed (op0, tem);
14007 md5_init_ctx (&ctx);
14008 fold_checksum_tree (op1, &ctx, ht);
14009 md5_finish_ctx (&ctx, checksum_after_op1);
14010 htab_delete (ht);
14012 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14013 fold_check_failed (op1, tem);
14014 #endif
14015 return tem;
14018 /* Fold a ternary tree expression with code CODE of type TYPE with
14019 operands OP0, OP1, and OP2. Return a folded expression if
14020 successful. Otherwise, return a tree expression with code CODE of
14021 type TYPE with operands OP0, OP1, and OP2. */
14023 tree
14024 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
14025 MEM_STAT_DECL)
14027 tree tem;
14028 #ifdef ENABLE_FOLD_CHECKING
14029 unsigned char checksum_before_op0[16],
14030 checksum_before_op1[16],
14031 checksum_before_op2[16],
14032 checksum_after_op0[16],
14033 checksum_after_op1[16],
14034 checksum_after_op2[16];
14035 struct md5_ctx ctx;
14036 htab_t ht;
14038 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14039 md5_init_ctx (&ctx);
14040 fold_checksum_tree (op0, &ctx, ht);
14041 md5_finish_ctx (&ctx, checksum_before_op0);
14042 htab_empty (ht);
14044 md5_init_ctx (&ctx);
14045 fold_checksum_tree (op1, &ctx, ht);
14046 md5_finish_ctx (&ctx, checksum_before_op1);
14047 htab_empty (ht);
14049 md5_init_ctx (&ctx);
14050 fold_checksum_tree (op2, &ctx, ht);
14051 md5_finish_ctx (&ctx, checksum_before_op2);
14052 htab_empty (ht);
14053 #endif
14055 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14056 tem = fold_ternary (code, type, op0, op1, op2);
14057 if (!tem)
14058 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14060 #ifdef ENABLE_FOLD_CHECKING
14061 md5_init_ctx (&ctx);
14062 fold_checksum_tree (op0, &ctx, ht);
14063 md5_finish_ctx (&ctx, checksum_after_op0);
14064 htab_empty (ht);
14066 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14067 fold_check_failed (op0, tem);
14069 md5_init_ctx (&ctx);
14070 fold_checksum_tree (op1, &ctx, ht);
14071 md5_finish_ctx (&ctx, checksum_after_op1);
14072 htab_empty (ht);
14074 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14075 fold_check_failed (op1, tem);
14077 md5_init_ctx (&ctx);
14078 fold_checksum_tree (op2, &ctx, ht);
14079 md5_finish_ctx (&ctx, checksum_after_op2);
14080 htab_delete (ht);
14082 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14083 fold_check_failed (op2, tem);
14084 #endif
14085 return tem;
14088 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14089 arguments in ARGARRAY, and a null static chain.
14090 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14091 of type TYPE from the given operands as constructed by build_call_array. */
14093 tree
14094 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14096 tree tem;
14097 #ifdef ENABLE_FOLD_CHECKING
14098 unsigned char checksum_before_fn[16],
14099 checksum_before_arglist[16],
14100 checksum_after_fn[16],
14101 checksum_after_arglist[16];
14102 struct md5_ctx ctx;
14103 htab_t ht;
14104 int i;
14106 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14107 md5_init_ctx (&ctx);
14108 fold_checksum_tree (fn, &ctx, ht);
14109 md5_finish_ctx (&ctx, checksum_before_fn);
14110 htab_empty (ht);
14112 md5_init_ctx (&ctx);
14113 for (i = 0; i < nargs; i++)
14114 fold_checksum_tree (argarray[i], &ctx, ht);
14115 md5_finish_ctx (&ctx, checksum_before_arglist);
14116 htab_empty (ht);
14117 #endif
14119 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14121 #ifdef ENABLE_FOLD_CHECKING
14122 md5_init_ctx (&ctx);
14123 fold_checksum_tree (fn, &ctx, ht);
14124 md5_finish_ctx (&ctx, checksum_after_fn);
14125 htab_empty (ht);
14127 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14128 fold_check_failed (fn, tem);
14130 md5_init_ctx (&ctx);
14131 for (i = 0; i < nargs; i++)
14132 fold_checksum_tree (argarray[i], &ctx, ht);
14133 md5_finish_ctx (&ctx, checksum_after_arglist);
14134 htab_delete (ht);
14136 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14137 fold_check_failed (NULL_TREE, tem);
14138 #endif
14139 return tem;
14142 /* Perform constant folding and related simplification of initializer
14143 expression EXPR. These behave identically to "fold_buildN" but ignore
14144 potential run-time traps and exceptions that fold must preserve. */
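/* For example (illustrative): inside a static initializer, an expression
   such as "1.0 / 3.0" may be folded even when -frounding-math or
   -ftrapping-math is in effect, since initializers are evaluated at
   translation time anyway.  */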
14146 #define START_FOLD_INIT \
14147 int saved_signaling_nans = flag_signaling_nans;\
14148 int saved_trapping_math = flag_trapping_math;\
14149 int saved_rounding_math = flag_rounding_math;\
14150 int saved_trapv = flag_trapv;\
14151 int saved_folding_initializer = folding_initializer;\
14152 flag_signaling_nans = 0;\
14153 flag_trapping_math = 0;\
14154 flag_rounding_math = 0;\
14155 flag_trapv = 0;\
14156 folding_initializer = 1;
14158 #define END_FOLD_INIT \
14159 flag_signaling_nans = saved_signaling_nans;\
14160 flag_trapping_math = saved_trapping_math;\
14161 flag_rounding_math = saved_rounding_math;\
14162 flag_trapv = saved_trapv;\
14163 folding_initializer = saved_folding_initializer;
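   /* Illustrative sketch (annotation, not part of the original source):
      with the flag-saving macros above, an initializer folder may fold
      expressions that plain fold must leave alone.  For example, under
      -ftrapping-math const_binop refuses to fold 1.0 / 0.0 because the
      division may trap at run time, but

        fold_build2_initializer (RDIV_EXPR, double_type_node,
                                 build_real (double_type_node, dconst1),
                                 build_real (double_type_node, dconst0))

      runs with flag_trapping_math cleared and may fold the quotient to
      +Inf, which is the behavior wanted for static initializers.  */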
14165 tree
14166 fold_build1_initializer (enum tree_code code, tree type, tree op)
14168 tree result;
14169 START_FOLD_INIT;
14171 result = fold_build1 (code, type, op);
14173 END_FOLD_INIT;
14174 return result;
14177 tree
14178 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14180 tree result;
14181 START_FOLD_INIT;
14183 result = fold_build2 (code, type, op0, op1);
14185 END_FOLD_INIT;
14186 return result;
14189 tree
14190 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14191 tree op2)
14193 tree result;
14194 START_FOLD_INIT;
14196 result = fold_build3 (code, type, op0, op1, op2);
14198 END_FOLD_INIT;
14199 return result;
14202 tree
14203 fold_build_call_array_initializer (tree type, tree fn,
14204 int nargs, tree *argarray)
14206 tree result;
14207 START_FOLD_INIT;
14209 result = fold_build_call_array (type, fn, nargs, argarray);
14211 END_FOLD_INIT;
14212 return result;
14215 #undef START_FOLD_INIT
14216 #undef END_FOLD_INIT
14218 /* Determine if first argument is a multiple of second argument. Return 0 if
14219 it is not, or we cannot easily determine it to be.
14221 An example of the sort of thing we care about (at this point; this routine
14222 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14223 fold cases do now) is discovering that
14225 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14227 is a multiple of
14229 SAVE_EXPR (J * 8)
14231 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14233 This code also handles discovering that
14235 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14237 is a multiple of 8 so we don't have to worry about dealing with a
14238 possible remainder.
14240 Note that we *look* inside a SAVE_EXPR only to determine how it was
14241 calculated; it is not safe for fold to do much of anything else with the
14242 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14243 at run time. For example, the latter example above *cannot* be implemented
14244 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14245 evaluation time of the original SAVE_EXPR is not necessarily the same at
14246 the time the new expression is evaluated. The only optimization of this
14247 sort that would be valid is changing
14249 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14251 divided by 8 to
14253 SAVE_EXPR (I) * SAVE_EXPR (J)
14255 (where the same SAVE_EXPR (J) is used in the original and the
14256 transformed version). */
14258 int
14259 multiple_of_p (tree type, const_tree top, const_tree bottom)
14261 if (operand_equal_p (top, bottom, 0))
14262 return 1;
14264 if (TREE_CODE (type) != INTEGER_TYPE)
14265 return 0;
14267 switch (TREE_CODE (top))
14269 case BIT_AND_EXPR:
14270 /* Bitwise and provides a power of two multiple. If the mask is
14271 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14272 if (!integer_pow2p (bottom))
14273 return 0;
14274 /* FALLTHRU */
14276 case MULT_EXPR:
14277 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14278 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14280 case PLUS_EXPR:
14281 case MINUS_EXPR:
14282 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14283 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14285 case LSHIFT_EXPR:
14286 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14288 tree op1, t1;
14290 op1 = TREE_OPERAND (top, 1);
14291 /* const_binop may not detect overflow correctly,
14292 so check for it explicitly here. */
14293 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14294 > TREE_INT_CST_LOW (op1)
14295 && TREE_INT_CST_HIGH (op1) == 0
14296 && 0 != (t1 = fold_convert (type,
14297 const_binop (LSHIFT_EXPR,
14298 size_one_node,
14299 op1, 0)))
14300 && !TREE_OVERFLOW (t1))
14301 return multiple_of_p (type, t1, bottom);
14303 return 0;
14305 case NOP_EXPR:
14306 /* Can't handle conversions from non-integral or wider integral type. */
14307 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14308 || (TYPE_PRECISION (type)
14309 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14310 return 0;
14312 /* .. fall through ... */
14314 case SAVE_EXPR:
14315 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14317 case INTEGER_CST:
14318 if (TREE_CODE (bottom) != INTEGER_CST
14319 || integer_zerop (bottom)
14320 || (TYPE_UNSIGNED (type)
14321 && (tree_int_cst_sgn (top) < 0
14322 || tree_int_cst_sgn (bottom) < 0)))
14323 return 0;
14324 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14325 top, bottom, 0));
14327 default:
14328 return 0;
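   /* Illustrative example (annotation, not part of the original source):
      with TYPE a 32-bit integer type, TOP = J * 8 + 16 and BOTTOM = 8,
      the PLUS_EXPR case recurses into both addends: the MULT_EXPR case
      accepts J * 8 because one factor equals BOTTOM, and the INTEGER_CST
      case accepts 16 since 16 % 8 == 0, so multiple_of_p returns 1.
      With TOP = J * 8 + 4 it returns 0, because 4 % 8 != 0.  */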
14332 /* Return true if CODE or TYPE is known to be non-negative. */
14334 static bool
14335 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14337 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14338 && truth_value_p (code))
14339 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14340 have a signed:1 type (where the values are -1 and 0). */
14341 return true;
14342 return false;
14345 /* Return true if (CODE OP0) is known to be non-negative. If the return
14346 value is based on the assumption that signed overflow is undefined,
14347 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14348 *STRICT_OVERFLOW_P. */
14350 bool
14351 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14352 bool *strict_overflow_p)
14354 if (TYPE_UNSIGNED (type))
14355 return true;
14357 switch (code)
14359 case ABS_EXPR:
14360 /* We can't return 1 if flag_wrapv is set because
14361 ABS_EXPR<INT_MIN> = INT_MIN. */
14362 if (!INTEGRAL_TYPE_P (type))
14363 return true;
14364 if (TYPE_OVERFLOW_UNDEFINED (type))
14366 *strict_overflow_p = true;
14367 return true;
14369 break;
14371 case NON_LVALUE_EXPR:
14372 case FLOAT_EXPR:
14373 case FIX_TRUNC_EXPR:
14374 return tree_expr_nonnegative_warnv_p (op0,
14375 strict_overflow_p);
14377 case NOP_EXPR:
14379 tree inner_type = TREE_TYPE (op0);
14380 tree outer_type = type;
14382 if (TREE_CODE (outer_type) == REAL_TYPE)
14384 if (TREE_CODE (inner_type) == REAL_TYPE)
14385 return tree_expr_nonnegative_warnv_p (op0,
14386 strict_overflow_p);
14387 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14389 if (TYPE_UNSIGNED (inner_type))
14390 return true;
14391 return tree_expr_nonnegative_warnv_p (op0,
14392 strict_overflow_p);
14395 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14397 if (TREE_CODE (inner_type) == REAL_TYPE)
14398 return tree_expr_nonnegative_warnv_p (op0,
14399 strict_overflow_p);
14400 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14401 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14402 && TYPE_UNSIGNED (inner_type);
14405 break;
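   /* Illustrative example (annotation, not part of the original source):
      for the NOP_EXPR case above, (int) u with U of type unsigned short
      is known non-negative: the inner type is unsigned and its 16 bits
      fit strictly within the 32-bit signed result, so no value of U can
      map to a negative int.  */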
14407 default:
14408 return tree_simple_nonnegative_warnv_p (code, type);
14411 /* We don't know sign of `t', so be conservative and return false. */
14412 return false;
14415 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14416 value is based on the assumption that signed overflow is undefined,
14417 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14418 *STRICT_OVERFLOW_P. */
14420 bool
14421 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14422 tree op1, bool *strict_overflow_p)
14424 if (TYPE_UNSIGNED (type))
14425 return true;
14427 switch (code)
14429 case POINTER_PLUS_EXPR:
14430 case PLUS_EXPR:
14431 if (FLOAT_TYPE_P (type))
14432 return (tree_expr_nonnegative_warnv_p (op0,
14433 strict_overflow_p)
14434 && tree_expr_nonnegative_warnv_p (op1,
14435 strict_overflow_p));
14437 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14438 both unsigned and at least 2 bits shorter than the result. */
14439 if (TREE_CODE (type) == INTEGER_TYPE
14440 && TREE_CODE (op0) == NOP_EXPR
14441 && TREE_CODE (op1) == NOP_EXPR)
14443 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14444 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14445 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14446 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14448 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14449 TYPE_PRECISION (inner2)) + 1;
14450 return prec < TYPE_PRECISION (type);
14453 break;
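   /* Illustrative example (annotation, not part of the original source):
      (int) us0 + (int) us1 with both inner operands of type unsigned
      short gives prec = max (16, 16) + 1 = 17 < 32, so the sum cannot
      reach the sign bit of the 32-bit result and is known
      non-negative.  */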
14455 case MULT_EXPR:
14456 if (FLOAT_TYPE_P (type))
14458 /* x * x for floating point x is always non-negative. */
14459 if (operand_equal_p (op0, op1, 0))
14460 return true;
14461 return (tree_expr_nonnegative_warnv_p (op0,
14462 strict_overflow_p)
14463 && tree_expr_nonnegative_warnv_p (op1,
14464 strict_overflow_p));
14467 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14468 both unsigned and their combined width is less than that of the result. */
14469 if (TREE_CODE (type) == INTEGER_TYPE
14470 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14471 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14473 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14474 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14475 : TREE_TYPE (op0);
14476 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14477 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14478 : TREE_TYPE (op1);
14480 bool unsigned0 = TYPE_UNSIGNED (inner0);
14481 bool unsigned1 = TYPE_UNSIGNED (inner1);
14483 if (TREE_CODE (op0) == INTEGER_CST)
14484 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14486 if (TREE_CODE (op1) == INTEGER_CST)
14487 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14489 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14490 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14492 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14493 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14494 : TYPE_PRECISION (inner0);
14496 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14497 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14498 : TYPE_PRECISION (inner1);
14500 return precision0 + precision1 < TYPE_PRECISION (type);
14503 return false;
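   /* Illustrative example (annotation, not part of the original source):
      (int) uc0 * (int) uc1 with both inner operands of type unsigned
      char yields precision0 + precision1 = 8 + 8 = 16 < 32, so the
      product fits and is non-negative; with unsigned short operands
      16 + 16 == 32 fails the test and we conservatively return
      false.  */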
14505 case BIT_AND_EXPR:
14506 case MAX_EXPR:
14507 return (tree_expr_nonnegative_warnv_p (op0,
14508 strict_overflow_p)
14509 || tree_expr_nonnegative_warnv_p (op1,
14510 strict_overflow_p));
14512 case BIT_IOR_EXPR:
14513 case BIT_XOR_EXPR:
14514 case MIN_EXPR:
14515 case RDIV_EXPR:
14516 case TRUNC_DIV_EXPR:
14517 case CEIL_DIV_EXPR:
14518 case FLOOR_DIV_EXPR:
14519 case ROUND_DIV_EXPR:
14520 return (tree_expr_nonnegative_warnv_p (op0,
14521 strict_overflow_p)
14522 && tree_expr_nonnegative_warnv_p (op1,
14523 strict_overflow_p));
14525 case TRUNC_MOD_EXPR:
14526 case CEIL_MOD_EXPR:
14527 case FLOOR_MOD_EXPR:
14528 case ROUND_MOD_EXPR:
14529 return tree_expr_nonnegative_warnv_p (op0,
14530 strict_overflow_p);
14531 default:
14532 return tree_simple_nonnegative_warnv_p (code, type);
14535 /* We don't know sign of `t', so be conservative and return false. */
14536 return false;
14539 /* Return true if T is known to be non-negative. If the return
14540 value is based on the assumption that signed overflow is undefined,
14541 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14542 *STRICT_OVERFLOW_P. */
14544 bool
14545 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14547 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14548 return true;
14550 switch (TREE_CODE (t))
14552 case INTEGER_CST:
14553 return tree_int_cst_sgn (t) >= 0;
14555 case REAL_CST:
14556 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14558 case FIXED_CST:
14559 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14561 case COND_EXPR:
14562 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14563 strict_overflow_p)
14564 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14565 strict_overflow_p));
14566 default:
14567 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14568 TREE_TYPE (t));
14570 /* We don't know sign of `t', so be conservative and return false. */
14571 return false;
14574 /* Return true if T is known to be non-negative. If the return
14575 value is based on the assumption that signed overflow is undefined,
14576 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14577 *STRICT_OVERFLOW_P. */
14579 bool
14580 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14581 tree arg0, tree arg1, bool *strict_overflow_p)
14583 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14584 switch (DECL_FUNCTION_CODE (fndecl))
14586 CASE_FLT_FN (BUILT_IN_ACOS):
14587 CASE_FLT_FN (BUILT_IN_ACOSH):
14588 CASE_FLT_FN (BUILT_IN_CABS):
14589 CASE_FLT_FN (BUILT_IN_COSH):
14590 CASE_FLT_FN (BUILT_IN_ERFC):
14591 CASE_FLT_FN (BUILT_IN_EXP):
14592 CASE_FLT_FN (BUILT_IN_EXP10):
14593 CASE_FLT_FN (BUILT_IN_EXP2):
14594 CASE_FLT_FN (BUILT_IN_FABS):
14595 CASE_FLT_FN (BUILT_IN_FDIM):
14596 CASE_FLT_FN (BUILT_IN_HYPOT):
14597 CASE_FLT_FN (BUILT_IN_POW10):
14598 CASE_INT_FN (BUILT_IN_FFS):
14599 CASE_INT_FN (BUILT_IN_PARITY):
14600 CASE_INT_FN (BUILT_IN_POPCOUNT):
14601 case BUILT_IN_BSWAP32:
14602 case BUILT_IN_BSWAP64:
14603 /* Always true. */
14604 return true;
14606 CASE_FLT_FN (BUILT_IN_SQRT):
14607 /* sqrt(-0.0) is -0.0. */
14608 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14609 return true;
14610 return tree_expr_nonnegative_warnv_p (arg0,
14611 strict_overflow_p);
14613 CASE_FLT_FN (BUILT_IN_ASINH):
14614 CASE_FLT_FN (BUILT_IN_ATAN):
14615 CASE_FLT_FN (BUILT_IN_ATANH):
14616 CASE_FLT_FN (BUILT_IN_CBRT):
14617 CASE_FLT_FN (BUILT_IN_CEIL):
14618 CASE_FLT_FN (BUILT_IN_ERF):
14619 CASE_FLT_FN (BUILT_IN_EXPM1):
14620 CASE_FLT_FN (BUILT_IN_FLOOR):
14621 CASE_FLT_FN (BUILT_IN_FMOD):
14622 CASE_FLT_FN (BUILT_IN_FREXP):
14623 CASE_FLT_FN (BUILT_IN_LCEIL):
14624 CASE_FLT_FN (BUILT_IN_LDEXP):
14625 CASE_FLT_FN (BUILT_IN_LFLOOR):
14626 CASE_FLT_FN (BUILT_IN_LLCEIL):
14627 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14628 CASE_FLT_FN (BUILT_IN_LLRINT):
14629 CASE_FLT_FN (BUILT_IN_LLROUND):
14630 CASE_FLT_FN (BUILT_IN_LRINT):
14631 CASE_FLT_FN (BUILT_IN_LROUND):
14632 CASE_FLT_FN (BUILT_IN_MODF):
14633 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14634 CASE_FLT_FN (BUILT_IN_RINT):
14635 CASE_FLT_FN (BUILT_IN_ROUND):
14636 CASE_FLT_FN (BUILT_IN_SCALB):
14637 CASE_FLT_FN (BUILT_IN_SCALBLN):
14638 CASE_FLT_FN (BUILT_IN_SCALBN):
14639 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14640 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14641 CASE_FLT_FN (BUILT_IN_SINH):
14642 CASE_FLT_FN (BUILT_IN_TANH):
14643 CASE_FLT_FN (BUILT_IN_TRUNC):
14644 /* True if the 1st argument is nonnegative. */
14645 return tree_expr_nonnegative_warnv_p (arg0,
14646 strict_overflow_p);
14648 CASE_FLT_FN (BUILT_IN_FMAX):
14649 /* True if the 1st OR the 2nd argument is nonnegative. */
14650 return (tree_expr_nonnegative_warnv_p (arg0,
14651 strict_overflow_p)
14652 || (tree_expr_nonnegative_warnv_p (arg1,
14653 strict_overflow_p)));
14655 CASE_FLT_FN (BUILT_IN_FMIN):
14656 /* True if the 1st AND 2nd arguments are nonnegative. */
14657 return (tree_expr_nonnegative_warnv_p (arg0,
14658 strict_overflow_p)
14659 && (tree_expr_nonnegative_warnv_p (arg1,
14660 strict_overflow_p)));
14662 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14663 /* True if the 2nd argument is nonnegative. */
14664 return tree_expr_nonnegative_warnv_p (arg1,
14665 strict_overflow_p);
14667 CASE_FLT_FN (BUILT_IN_POWI):
14668 /* True if the 1st argument is nonnegative or the second
14669 argument is an even integer. */
14670 if (TREE_CODE (arg1) == INTEGER_CST
14671 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14672 return true;
14673 return tree_expr_nonnegative_warnv_p (arg0,
14674 strict_overflow_p);
14676 CASE_FLT_FN (BUILT_IN_POW):
14677 /* True if the 1st argument is nonnegative or the second
14678 argument is an even integer valued real. */
14679 if (TREE_CODE (arg1) == REAL_CST)
14681 REAL_VALUE_TYPE c;
14682 HOST_WIDE_INT n;
14684 c = TREE_REAL_CST (arg1);
14685 n = real_to_integer (&c);
14686 if ((n & 1) == 0)
14688 REAL_VALUE_TYPE cint;
14689 real_from_integer (&cint, VOIDmode, n,
14690 n < 0 ? -1 : 0, 0);
14691 if (real_identical (&c, &cint))
14692 return true;
14695 return tree_expr_nonnegative_warnv_p (arg0,
14696 strict_overflow_p);
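   /* Illustrative example (annotation, not part of the original source):
      pow (x, 2.0) is concluded non-negative for any x because 2.0 is an
      even integer-valued REAL_CST, whereas pow (x, 3.0) is concluded
      non-negative only when x itself can be shown non-negative.  */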
14698 default:
14699 break;
14701 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14702 type);
14705 /* Return true if T is known to be non-negative. If the return
14706 value is based on the assumption that signed overflow is undefined,
14707 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14708 *STRICT_OVERFLOW_P. */
14710 bool
14711 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14713 enum tree_code code = TREE_CODE (t);
14714 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14715 return true;
14717 switch (code)
14719 case TARGET_EXPR:
14721 tree temp = TARGET_EXPR_SLOT (t);
14722 t = TARGET_EXPR_INITIAL (t);
14724 /* If the initializer is non-void, then it's a normal expression
14725 that will be assigned to the slot. */
14726 if (!VOID_TYPE_P (t))
14727 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14729 /* Otherwise, the initializer sets the slot in some way. One common
14730 way is an assignment statement at the end of the initializer. */
14731 while (1)
14733 if (TREE_CODE (t) == BIND_EXPR)
14734 t = expr_last (BIND_EXPR_BODY (t));
14735 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14736 || TREE_CODE (t) == TRY_CATCH_EXPR)
14737 t = expr_last (TREE_OPERAND (t, 0));
14738 else if (TREE_CODE (t) == STATEMENT_LIST)
14739 t = expr_last (t);
14740 else
14741 break;
14743 if (TREE_CODE (t) == MODIFY_EXPR
14744 && TREE_OPERAND (t, 0) == temp)
14745 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14746 strict_overflow_p);
14748 return false;
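   /* Illustrative example (annotation, not part of the original source):
      for TARGET_EXPR <D.1234, { ...; D.1234 = 5; }> (D.1234 being a
      hypothetical slot name) the loop above walks to the trailing
      MODIFY_EXPR, and the question reduces to whether the stored value
      5 is non-negative.  */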
14751 case CALL_EXPR:
14753 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14754 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14756 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14757 get_callee_fndecl (t),
14758 arg0,
14759 arg1,
14760 strict_overflow_p);
14762 case COMPOUND_EXPR:
14763 case MODIFY_EXPR:
14764 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14765 strict_overflow_p);
14766 case BIND_EXPR:
14767 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14768 strict_overflow_p);
14769 case SAVE_EXPR:
14770 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14771 strict_overflow_p);
14773 default:
14774 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14775 TREE_TYPE (t));
14778 /* We don't know sign of `t', so be conservative and return false. */
14779 return false;
14782 /* Return true if T is known to be non-negative. If the return
14783 value is based on the assumption that signed overflow is undefined,
14784 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14785 *STRICT_OVERFLOW_P. */
14787 bool
14788 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14790 enum tree_code code;
14791 if (t == error_mark_node)
14792 return false;
14794 code = TREE_CODE (t);
14795 switch (TREE_CODE_CLASS (code))
14797 case tcc_binary:
14798 case tcc_comparison:
14799 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14800 TREE_TYPE (t),
14801 TREE_OPERAND (t, 0),
14802 TREE_OPERAND (t, 1),
14803 strict_overflow_p);
14805 case tcc_unary:
14806 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14807 TREE_TYPE (t),
14808 TREE_OPERAND (t, 0),
14809 strict_overflow_p);
14811 case tcc_constant:
14812 case tcc_declaration:
14813 case tcc_reference:
14814 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14816 default:
14817 break;
14820 switch (code)
14822 case TRUTH_AND_EXPR:
14823 case TRUTH_OR_EXPR:
14824 case TRUTH_XOR_EXPR:
14825 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14826 TREE_TYPE (t),
14827 TREE_OPERAND (t, 0),
14828 TREE_OPERAND (t, 1),
14829 strict_overflow_p);
14830 case TRUTH_NOT_EXPR:
14831 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14832 TREE_TYPE (t),
14833 TREE_OPERAND (t, 0),
14834 strict_overflow_p);
14836 case COND_EXPR:
14837 case CONSTRUCTOR:
14838 case OBJ_TYPE_REF:
14839 case ASSERT_EXPR:
14840 case ADDR_EXPR:
14841 case WITH_SIZE_EXPR:
14842 case EXC_PTR_EXPR:
14843 case SSA_NAME:
14844 case FILTER_EXPR:
14845 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14847 default:
14848 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14852 /* Return true if `t' is known to be non-negative. Handle warnings
14853 about undefined signed overflow. */
14855 bool
14856 tree_expr_nonnegative_p (tree t)
14858 bool ret, strict_overflow_p;
14860 strict_overflow_p = false;
14861 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14862 if (strict_overflow_p)
14863 fold_overflow_warning (("assuming signed overflow does not occur when "
14864 "determining that expression is always "
14865 "non-negative"),
14866 WARN_STRICT_OVERFLOW_MISC);
14867 return ret;
14871 /* Return true when (CODE OP0) is known to be nonzero.
14872 For floating point we further ensure that the value is not denormal.
14873 Similar logic is present in nonzero_address in rtlanal.h.
14875 If the return value is based on the assumption that signed overflow
14876 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14877 change *STRICT_OVERFLOW_P. */
14879 bool
14880 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14881 bool *strict_overflow_p)
14883 switch (code)
14885 case ABS_EXPR:
14886 return tree_expr_nonzero_warnv_p (op0,
14887 strict_overflow_p);
14889 case NOP_EXPR:
14891 tree inner_type = TREE_TYPE (op0);
14892 tree outer_type = type;
14894 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14895 && tree_expr_nonzero_warnv_p (op0,
14896 strict_overflow_p));
14898 break;
14900 case NON_LVALUE_EXPR:
14901 return tree_expr_nonzero_warnv_p (op0,
14902 strict_overflow_p);
14904 default:
14905 break;
14908 return false;
14911 /* Return true when (CODE OP0 OP1) is known to be nonzero.
14912 For floating point we further ensure that the value is not denormal.
14913 Similar logic is present in nonzero_address in rtlanal.h.
14915 If the return value is based on the assumption that signed overflow
14916 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14917 change *STRICT_OVERFLOW_P. */
14919 bool
14920 tree_binary_nonzero_warnv_p (enum tree_code code,
14921 tree type,
14922 tree op0,
14923 tree op1, bool *strict_overflow_p)
14925 bool sub_strict_overflow_p;
14926 switch (code)
14928 case POINTER_PLUS_EXPR:
14929 case PLUS_EXPR:
14930 if (TYPE_OVERFLOW_UNDEFINED (type))
14932 /* In the presence of negative values it is hard
14933 to say anything definite. */
14934 sub_strict_overflow_p = false;
14935 if (!tree_expr_nonnegative_warnv_p (op0,
14936 &sub_strict_overflow_p)
14937 || !tree_expr_nonnegative_warnv_p (op1,
14938 &sub_strict_overflow_p))
14939 return false;
14940 /* One of the operands must be positive and the other non-negative. */
14941 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14942 overflows, on a twos-complement machine the sum of two
14943 nonnegative numbers can never be zero. */
14944 return (tree_expr_nonzero_warnv_p (op0,
14945 strict_overflow_p)
14946 || tree_expr_nonzero_warnv_p (op1,
14947 strict_overflow_p));
14949 break;
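   /* Illustrative example (annotation, not part of the original source):
      for signed int I known non-negative, I + 1 is known nonzero here:
      both addends are non-negative, one is nonzero, and as noted above
      even a wrapping sum of two non-negative values cannot be zero on a
      two's-complement machine.  */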
14951 case MULT_EXPR:
14952 if (TYPE_OVERFLOW_UNDEFINED (type))
14954 if (tree_expr_nonzero_warnv_p (op0,
14955 strict_overflow_p)
14956 && tree_expr_nonzero_warnv_p (op1,
14957 strict_overflow_p))
14959 *strict_overflow_p = true;
14960 return true;
14963 break;
14965 case MIN_EXPR:
14966 sub_strict_overflow_p = false;
14967 if (tree_expr_nonzero_warnv_p (op0,
14968 &sub_strict_overflow_p)
14969 && tree_expr_nonzero_warnv_p (op1,
14970 &sub_strict_overflow_p))
14972 if (sub_strict_overflow_p)
14973 *strict_overflow_p = true;
14975 break;
14977 case MAX_EXPR:
14978 sub_strict_overflow_p = false;
14979 if (tree_expr_nonzero_warnv_p (op0,
14980 &sub_strict_overflow_p))
14982 if (sub_strict_overflow_p)
14983 *strict_overflow_p = true;
14985 /* When both operands are nonzero, then MAX must be too. */
14986 if (tree_expr_nonzero_warnv_p (op1,
14987 strict_overflow_p))
14988 return true;
14990 /* MAX where operand 0 is positive is positive. */
14991 return tree_expr_nonnegative_warnv_p (op0,
14992 strict_overflow_p);
14994 /* MAX where operand 1 is positive is positive. */
14995 else if (tree_expr_nonzero_warnv_p (op1,
14996 &sub_strict_overflow_p)
14997 && tree_expr_nonnegative_warnv_p (op1,
14998 &sub_strict_overflow_p))
15000 if (sub_strict_overflow_p)
15001 *strict_overflow_p = true;
15002 return true;
15004 break;
15006 case BIT_IOR_EXPR:
15007 return (tree_expr_nonzero_warnv_p (op1,
15008 strict_overflow_p)
15009 || tree_expr_nonzero_warnv_p (op0,
15010 strict_overflow_p));
15012 default:
15013 break;
15016 return false;
15019 /* Return true when T is an address and is known to be nonzero.
15020 For floating point we further ensure that T is not denormal.
15021 Similar logic is present in nonzero_address in rtlanal.h.
15023 If the return value is based on the assumption that signed overflow
15024 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15025 change *STRICT_OVERFLOW_P. */
15027 bool
15028 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15030 bool sub_strict_overflow_p;
15031 switch (TREE_CODE (t))
15033 case INTEGER_CST:
15034 return !integer_zerop (t);
15036 case ADDR_EXPR:
15038 tree base = get_base_address (TREE_OPERAND (t, 0));
15040 if (!base)
15041 return false;
15043 /* Weak declarations may link to NULL. Other things may also be NULL,
15044 so require -fdelete-null-pointer-checks to assume they are not; but
15045 variables allocated on the stack are always non-NULL. */
15046 if (DECL_P (base)
15047 && (flag_delete_null_pointer_checks
15048 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15049 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15051 /* Constants are never weak. */
15052 if (CONSTANT_CLASS_P (base))
15053 return true;
15055 return false;
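   /* Illustrative example (annotation, not part of the original source):
      the address of a local (non-static) variable or of a string
      literal is always nonzero; the address of an ordinary global is
      nonzero only under -fdelete-null-pointer-checks; and the address
      of a DECL_WEAK symbol is never assumed nonzero, since a weak
      symbol may resolve to NULL.  */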
15058 case COND_EXPR:
15059 sub_strict_overflow_p = false;
15060 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15061 &sub_strict_overflow_p)
15062 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15063 &sub_strict_overflow_p))
15065 if (sub_strict_overflow_p)
15066 *strict_overflow_p = true;
15067 return true;
15069 break;
15071 default:
15072 break;
15074 return false;
15077 /* Return true when T is an address and is known to be nonzero.
15078 For floating point we further ensure that T is not denormal.
15079 Similar logic is present in nonzero_address in rtlanal.h.
15081 If the return value is based on the assumption that signed overflow
15082 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15083 change *STRICT_OVERFLOW_P. */
15085 bool
15086 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15088 tree type = TREE_TYPE (t);
15089 enum tree_code code;
15091 /* Doing something useful for floating point would need more work. */
15092 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15093 return false;
15095 code = TREE_CODE (t);
15096 switch (TREE_CODE_CLASS (code))
15098 case tcc_unary:
15099 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15100 strict_overflow_p);
15101 case tcc_binary:
15102 case tcc_comparison:
15103 return tree_binary_nonzero_warnv_p (code, type,
15104 TREE_OPERAND (t, 0),
15105 TREE_OPERAND (t, 1),
15106 strict_overflow_p);
15107 case tcc_constant:
15108 case tcc_declaration:
15109 case tcc_reference:
15110 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15112 default:
15113 break;
15116 switch (code)
15118 case TRUTH_NOT_EXPR:
15119 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15120 strict_overflow_p);
15122 case TRUTH_AND_EXPR:
15123 case TRUTH_OR_EXPR:
15124 case TRUTH_XOR_EXPR:
15125 return tree_binary_nonzero_warnv_p (code, type,
15126 TREE_OPERAND (t, 0),
15127 TREE_OPERAND (t, 1),
15128 strict_overflow_p);
15130 case COND_EXPR:
15131 case CONSTRUCTOR:
15132 case OBJ_TYPE_REF:
15133 case ASSERT_EXPR:
15134 case ADDR_EXPR:
15135 case WITH_SIZE_EXPR:
15136 case EXC_PTR_EXPR:
15137 case SSA_NAME:
15138 case FILTER_EXPR:
15139 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15141 case COMPOUND_EXPR:
15142 case MODIFY_EXPR:
15143 case BIND_EXPR:
15144 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15145 strict_overflow_p);
15147 case SAVE_EXPR:
15148 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15149 strict_overflow_p);
15151 case CALL_EXPR:
15152 return alloca_call_p (t);
15154 default:
15155 break;
15157 return false;
15160 /* Return true when T is an address and is known to be nonzero.
15161 Handle warnings about undefined signed overflow. */
15163 bool
15164 tree_expr_nonzero_p (tree t)
15166 bool ret, strict_overflow_p;
15168 strict_overflow_p = false;
15169 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15170 if (strict_overflow_p)
15171 fold_overflow_warning (("assuming signed overflow does not occur when "
15172 "determining that expression is always "
15173 "non-zero"),
15174 WARN_STRICT_OVERFLOW_MISC);
15175 return ret;
15178 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15179 attempt to fold the expression to a constant without modifying TYPE,
15180 OP0 or OP1.
15182 If the expression could be simplified to a constant, then return
15183 the constant. If the expression would not be simplified to a
15184 constant, then return NULL_TREE. */
15186 tree
15187 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15189 tree tem = fold_binary (code, type, op0, op1);
15190 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15193 /* Given the components of a unary expression CODE, TYPE and OP0,
15194 attempt to fold the expression to a constant without modifying
15195 TYPE or OP0.
15197 If the expression could be simplified to a constant, then return
15198 the constant. If the expression would not be simplified to a
15199 constant, then return NULL_TREE. */
15201 tree
15202 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15204 tree tem = fold_unary (code, type, op0);
15205 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15208 /* If EXP represents referencing an element in a constant string
15209 (either via pointer arithmetic or array indexing), return the
15210 tree representing the value accessed, otherwise return NULL. */
15212 tree
15213 fold_read_from_constant_string (tree exp)
15215 if ((TREE_CODE (exp) == INDIRECT_REF
15216 || TREE_CODE (exp) == ARRAY_REF)
15217 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15219 tree exp1 = TREE_OPERAND (exp, 0);
15220 tree index;
15221 tree string;
15223 if (TREE_CODE (exp) == INDIRECT_REF)
15224 string = string_constant (exp1, &index);
15225 else
15227 tree low_bound = array_ref_low_bound (exp);
15228 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15230 /* Optimize the special-case of a zero lower bound.
15232 We convert the low_bound to sizetype to avoid some problems
15233 with constant folding. (E.g. suppose the lower bound is 1,
15234 and its mode is QI. Without the conversion, (ARRAY
15235 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15236 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15237 if (! integer_zerop (low_bound))
15238 index = size_diffop (index, fold_convert (sizetype, low_bound));
15240 string = exp1;
15243 if (string
15244 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15245 && TREE_CODE (string) == STRING_CST
15246 && TREE_CODE (index) == INTEGER_CST
15247 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15248 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15249 == MODE_INT)
15250 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15251 return build_int_cst_type (TREE_TYPE (exp),
15252 (TREE_STRING_POINTER (string)
15253 [TREE_INT_CST_LOW (index)]));
15255 return NULL;
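   /* Illustrative example (annotation, not part of the original source):
      for EXP representing "abc"[1], the checks above succeed and the
      routine returns the character constant 'b'; an out-of-range index
      such as "abc"[7] fails the TREE_STRING_LENGTH comparison and
      yields NULL.  */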
15258 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15259 an integer constant, real, or fixed-point constant.
15261 TYPE is the type of the result. */
15263 static tree
15264 fold_negate_const (tree arg0, tree type)
15266 tree t = NULL_TREE;
15268 switch (TREE_CODE (arg0))
15270 case INTEGER_CST:
15272 unsigned HOST_WIDE_INT low;
15273 HOST_WIDE_INT high;
15274 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15275 TREE_INT_CST_HIGH (arg0),
15276 &low, &high);
15277 t = force_fit_type_double (type, low, high, 1,
15278 (overflow | TREE_OVERFLOW (arg0))
15279 && !TYPE_UNSIGNED (type));
15280 break;
15283 case REAL_CST:
15284 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15285 break;
15287 case FIXED_CST:
15289 FIXED_VALUE_TYPE f;
15290 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15291 &(TREE_FIXED_CST (arg0)), NULL,
15292 TYPE_SATURATING (type));
15293 t = build_fixed (type, f);
15294 /* Propagate overflow flags. */
15295 if (overflow_p | TREE_OVERFLOW (arg0))
15296 TREE_OVERFLOW (t) = 1;
15297 break;
15300 default:
15301 gcc_unreachable ();
15304 return t;
15307 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15308 an integer constant or real constant.
15310 TYPE is the type of the result. */
15312 tree
15313 fold_abs_const (tree arg0, tree type)
15315 tree t = NULL_TREE;
15317 switch (TREE_CODE (arg0))
15319 case INTEGER_CST:
15320 /* If the value is unsigned, then the absolute value is
15321 the same as the ordinary value. */
15322 if (TYPE_UNSIGNED (type))
15323 t = arg0;
15324 /* Similarly, if the value is non-negative. */
15325 else if (INT_CST_LT (integer_minus_one_node, arg0))
15326 t = arg0;
15327 /* If the value is negative, then the absolute value is
15328 its negation. */
15329 else
15331 unsigned HOST_WIDE_INT low;
15332 HOST_WIDE_INT high;
15333 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15334 TREE_INT_CST_HIGH (arg0),
15335 &low, &high);
15336 t = force_fit_type_double (type, low, high, -1,
15337 overflow | TREE_OVERFLOW (arg0));
15339 break;
15341 case REAL_CST:
15342 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15343 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15344 else
15345 t = arg0;
15346 break;
15348 default:
15349 gcc_unreachable ();
15352 return t;
15355 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15356 constant. TYPE is the type of the result. */
15358 static tree
15359 fold_not_const (tree arg0, tree type)
15361 tree t = NULL_TREE;
15363 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15365 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15366 ~TREE_INT_CST_HIGH (arg0), 0,
15367 TREE_OVERFLOW (arg0));
15369 return t;
15372 /* Given CODE, a relational operator, the target type, TYPE and two
15373 constant operands OP0 and OP1, return the result of the
15374 relational operation. If the result is not a compile time
15375 constant, then return NULL_TREE. */
15377 static tree
15378 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15380 int result, invert;
15382 /* From here on, the only cases we handle are when the result is
15383 known to be a constant. */
15385 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15387 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15388 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15390 /* Handle the cases where either operand is a NaN. */
15391 if (real_isnan (c0) || real_isnan (c1))
15393 switch (code)
15395 case EQ_EXPR:
15396 case ORDERED_EXPR:
15397 result = 0;
15398 break;
15400 case NE_EXPR:
15401 case UNORDERED_EXPR:
15402 case UNLT_EXPR:
15403 case UNLE_EXPR:
15404 case UNGT_EXPR:
15405 case UNGE_EXPR:
15406 case UNEQ_EXPR:
15407 result = 1;
15408 break;
15410 case LT_EXPR:
15411 case LE_EXPR:
15412 case GT_EXPR:
15413 case GE_EXPR:
15414 case LTGT_EXPR:
15415 if (flag_trapping_math)
15416 return NULL_TREE;
15417 result = 0;
15418 break;
15420 default:
15421 gcc_unreachable ();
15424 return constant_boolean_node (result, type);
15427 return constant_boolean_node (real_compare (code, c0, c1), type);
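   /* Illustrative example (annotation, not part of the original source):
      NaN == 1.0 folds to 0 and NaN != 1.0 folds to 1, while an ordered
      comparison such as NaN < 1.0 is folded (to 0) only when
      -fno-trapping-math allows discarding the invalid-operand exception
      it would raise.  */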
15430 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15432 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15433 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15434 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15437 /* Handle equality/inequality of complex constants. */
15438 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15440 tree rcond = fold_relational_const (code, type,
15441 TREE_REALPART (op0),
15442 TREE_REALPART (op1));
15443 tree icond = fold_relational_const (code, type,
15444 TREE_IMAGPART (op0),
15445 TREE_IMAGPART (op1));
15446 if (code == EQ_EXPR)
15447 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15448 else if (code == NE_EXPR)
15449 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15450 else
15451 return NULL_TREE;
15454 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15456 To compute GT, swap the arguments and do LT.
15457 To compute GE, do LT and invert the result.
15458 To compute LE, swap the arguments, do LT and invert the result.
15459 To compute NE, do EQ and invert the result.
15461 Therefore, the code below must handle only EQ and LT. */
15463 if (code == LE_EXPR || code == GT_EXPR)
15465 tree tem = op0;
15466 op0 = op1;
15467 op1 = tem;
15468 code = swap_tree_comparison (code);
15471 /* Note that it is safe to invert for real values here because we
15472 have already handled the one case that it matters. */
15474 invert = 0;
15475 if (code == NE_EXPR || code == GE_EXPR)
15477 invert = 1;
15478 code = invert_tree_comparison (code, false);
15481 /* Compute a result for LT or EQ if args permit;
15482 otherwise return NULL_TREE. */
15483 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15485 if (code == EQ_EXPR)
15486 result = tree_int_cst_equal (op0, op1);
15487 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15488 result = INT_CST_LT_UNSIGNED (op0, op1);
15489 else
15490 result = INT_CST_LT (op0, op1);
15492 else
15493 return NULL_TREE;
15495 if (invert)
15496 result ^= 1;
15497 return constant_boolean_node (result, type);
15500 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15501 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15502 itself. */
15504 tree
15505 fold_build_cleanup_point_expr (tree type, tree expr)
15507 /* If the expression does not have side effects then we don't have to wrap
15508 it with a cleanup point expression. */
15509 if (!TREE_SIDE_EFFECTS (expr))
15510 return expr;
15512 /* If the expression is a return, check whether the expression inside
15513 the return, or the right-hand side of the modify expression inside
15514 the return, has no side effects. If either has none, we don't need
15515 to wrap the expression in a cleanup point expression. Note we don't
15516 check the left hand side of the modify because it should always be a return decl. */
15517 if (TREE_CODE (expr) == RETURN_EXPR)
15519 tree op = TREE_OPERAND (expr, 0);
15520 if (!op || !TREE_SIDE_EFFECTS (op))
15521 return expr;
15522 op = TREE_OPERAND (op, 1);
15523 if (!TREE_SIDE_EFFECTS (op))
15524 return expr;
15527 return build1 (CLEANUP_POINT_EXPR, type, expr);
15530 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15531 of an indirection through OP0, or NULL_TREE if no simplification is
15532 possible. */
15534 tree
15535 fold_indirect_ref_1 (tree type, tree op0)
15537 tree sub = op0;
15538 tree subtype;
15540 STRIP_NOPS (sub);
15541 subtype = TREE_TYPE (sub);
15542 if (!POINTER_TYPE_P (subtype))
15543 return NULL_TREE;
15545 if (TREE_CODE (sub) == ADDR_EXPR)
15547 tree op = TREE_OPERAND (sub, 0);
15548 tree optype = TREE_TYPE (op);
15549 /* *&CONST_DECL -> the value of the const decl. */
15550 if (TREE_CODE (op) == CONST_DECL)
15551 return DECL_INITIAL (op);
15552 /* *&p => p; make sure to handle *&"str"[cst] here. */
15553 if (type == optype)
15555 tree fop = fold_read_from_constant_string (op);
15556 if (fop)
15557 return fop;
15558 else
15559 return op;
15561 /* *(foo *)&fooarray => fooarray[0] */
15562 else if (TREE_CODE (optype) == ARRAY_TYPE
15563 && type == TREE_TYPE (optype))
15565 tree type_domain = TYPE_DOMAIN (optype);
15566 tree min_val = size_zero_node;
15567 if (type_domain && TYPE_MIN_VALUE (type_domain))
15568 min_val = TYPE_MIN_VALUE (type_domain);
15569 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15571 /* *(foo *)&complexfoo => __real__ complexfoo */
15572 else if (TREE_CODE (optype) == COMPLEX_TYPE
15573 && type == TREE_TYPE (optype))
15574 return fold_build1 (REALPART_EXPR, type, op);
15575 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15576 else if (TREE_CODE (optype) == VECTOR_TYPE
15577 && type == TREE_TYPE (optype))
15579 tree part_width = TYPE_SIZE (type);
15580 tree index = bitsize_int (0);
15581 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15585 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15586 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15587 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15589 tree op00 = TREE_OPERAND (sub, 0);
15590 tree op01 = TREE_OPERAND (sub, 1);
15591 tree op00type;
15593 STRIP_NOPS (op00);
15594 op00type = TREE_TYPE (op00);
15595 if (TREE_CODE (op00) == ADDR_EXPR
15596 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15597 && type == TREE_TYPE (TREE_TYPE (op00type)))
15599 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15600 tree part_width = TYPE_SIZE (type);
15601 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15602 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15603 tree index = bitsize_int (indexi);
15605 if (offset/part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15606 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15607 part_width, index);
15613 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15614 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15615 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15617 tree op00 = TREE_OPERAND (sub, 0);
15618 tree op01 = TREE_OPERAND (sub, 1);
15619 tree op00type;
15621 STRIP_NOPS (op00);
15622 op00type = TREE_TYPE (op00);
15623 if (TREE_CODE (op00) == ADDR_EXPR
15624 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15625 && type == TREE_TYPE (TREE_TYPE (op00type)))
15627 tree size = TYPE_SIZE_UNIT (type);
15628 if (tree_int_cst_equal (size, op01))
15629 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15633 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15634 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15635 && type == TREE_TYPE (TREE_TYPE (subtype)))
15637 tree type_domain;
15638 tree min_val = size_zero_node;
15639 sub = build_fold_indirect_ref (sub);
15640 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15641 if (type_domain && TYPE_MIN_VALUE (type_domain))
15642 min_val = TYPE_MIN_VALUE (type_domain);
15643 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15646 return NULL_TREE;
15649 /* Builds an expression for an indirection through T, simplifying some
15650 cases. */
15652 tree
15653 build_fold_indirect_ref (tree t)
15655 tree type = TREE_TYPE (TREE_TYPE (t));
15656 tree sub = fold_indirect_ref_1 (type, t);
15658 if (sub)
15659 return sub;
15660 else
15661 return build1 (INDIRECT_REF, type, t);
15664 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15666 tree
15667 fold_indirect_ref (tree t)
15669 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15671 if (sub)
15672 return sub;
15673 else
15674 return t;
15677 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15678 whose result is ignored. The type of the returned tree need not be
15679 the same as the original expression. */
15681 tree
15682 fold_ignored_result (tree t)
15684 if (!TREE_SIDE_EFFECTS (t))
15685 return integer_zero_node;
15687 for (;;)
15688 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15690 case tcc_unary:
15691 t = TREE_OPERAND (t, 0);
15692 break;
15694 case tcc_binary:
15695 case tcc_comparison:
15696 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15697 t = TREE_OPERAND (t, 0);
15698 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15699 t = TREE_OPERAND (t, 1);
15700 else
15701 return t;
15702 break;
15704 case tcc_expression:
15705 switch (TREE_CODE (t))
15707 case COMPOUND_EXPR:
15708 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15709 return t;
15710 t = TREE_OPERAND (t, 0);
15711 break;
15713 case COND_EXPR:
15714 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15715 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15716 return t;
15717 t = TREE_OPERAND (t, 0);
15718 break;
15720 default:
15721 return t;
15723 break;
15725 default:
15726 return t;
15730 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15731 This can only be applied to objects of a sizetype. */
15733 tree
15734 round_up (tree value, int divisor)
15736 tree div = NULL_TREE;
15738 gcc_assert (divisor > 0);
15739 if (divisor == 1)
15740 return value;
15742 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15743 have to do anything. Only do this when we are not given a const,
15744 because in that case, this check is more expensive than just
15745 doing it. */
15746 if (TREE_CODE (value) != INTEGER_CST)
15748 div = build_int_cst (TREE_TYPE (value), divisor);
15750 if (multiple_of_p (TREE_TYPE (value), value, div))
15751 return value;
15754 /* If divisor is a power of two, simplify this to bit manipulation. */
15755 if (divisor == (divisor & -divisor))
15757 if (TREE_CODE (value) == INTEGER_CST)
15759 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15760 unsigned HOST_WIDE_INT high;
15761 bool overflow_p;
15763 if ((low & (divisor - 1)) == 0)
15764 return value;
15766 overflow_p = TREE_OVERFLOW (value);
15767 high = TREE_INT_CST_HIGH (value);
15768 low &= ~(divisor - 1);
15769 low += divisor;
15770 if (low == 0)
15772 high++;
15773 if (high == 0)
15774 overflow_p = true;
15777 return force_fit_type_double (TREE_TYPE (value), low, high,
15778 -1, overflow_p);
15780 else
15782 tree t;
15784 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15785 value = size_binop (PLUS_EXPR, value, t);
15786 t = build_int_cst (TREE_TYPE (value), -divisor);
15787 value = size_binop (BIT_AND_EXPR, value, t);
15790 else
15792 if (!div)
15793 div = build_int_cst (TREE_TYPE (value), divisor);
15794 value = size_binop (CEIL_DIV_EXPR, value, div);
15795 value = size_binop (MULT_EXPR, value, div);
15798 return value;
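   /* Illustrative example (annotation, not part of the original source):
      for a power-of-two divisor the code above computes
      (VALUE + DIVISOR - 1) & -DIVISOR, so round_up of 10 to a multiple
      of 8 yields 16, and 16 stays 16; a divisor such as 12 instead goes
      through the CEIL_DIV_EXPR / MULT_EXPR path.  */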
15801 /* Likewise, but round down. */
15803 tree
15804 round_down (tree value, int divisor)
15806 tree div = NULL_TREE;
15808 gcc_assert (divisor > 0);
15809 if (divisor == 1)
15810 return value;
15812 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15813 have to do anything. Only do this when we are not given a const,
15814 because in that case, this check is more expensive than just
15815 doing it. */
15816 if (TREE_CODE (value) != INTEGER_CST)
15818 div = build_int_cst (TREE_TYPE (value), divisor);
15820 if (multiple_of_p (TREE_TYPE (value), value, div))
15821 return value;
15824 /* If divisor is a power of two, simplify this to bit manipulation. */
15825 if (divisor == (divisor & -divisor))
15827 tree t;
15829 t = build_int_cst (TREE_TYPE (value), -divisor);
15830 value = size_binop (BIT_AND_EXPR, value, t);
15832 else
15834 if (!div)
15835 div = build_int_cst (TREE_TYPE (value), divisor);
15836 value = size_binop (FLOOR_DIV_EXPR, value, div);
15837 value = size_binop (MULT_EXPR, value, div);
15840 return value;
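   /* Illustrative example (annotation, not part of the original source):
      for a power-of-two divisor round_down reduces to VALUE & -DIVISOR,
      so rounding 10 down to a multiple of 8 yields 8; the mask works
      because -8 has all bits set above bit 2.  */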
15843 /* Returns the pointer to the base of the object addressed by EXP and
15844 extracts the information about the offset of the access, storing it
15845 to PBITPOS and POFFSET. */
15847 static tree
15848 split_address_to_core_and_offset (tree exp,
15849 HOST_WIDE_INT *pbitpos, tree *poffset)
15851 tree core;
15852 enum machine_mode mode;
15853 int unsignedp, volatilep;
15854 HOST_WIDE_INT bitsize;
15856 if (TREE_CODE (exp) == ADDR_EXPR)
15858 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15859 poffset, &mode, &unsignedp, &volatilep,
15860 false);
15861 core = build_fold_addr_expr (core);
15863 else
15865 core = exp;
15866 *pbitpos = 0;
15867 *poffset = NULL_TREE;
15870 return core;
15873 /* Returns true if addresses of E1 and E2 differ by a constant, false
15874 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15876 bool
15877 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15879 tree core1, core2;
15880 HOST_WIDE_INT bitpos1, bitpos2;
15881 tree toffset1, toffset2, tdiff, type;
15883 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15884 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15886 if (bitpos1 % BITS_PER_UNIT != 0
15887 || bitpos2 % BITS_PER_UNIT != 0
15888 || !operand_equal_p (core1, core2, 0))
15889 return false;
15891 if (toffset1 && toffset2)
15893 type = TREE_TYPE (toffset1);
15894 if (type != TREE_TYPE (toffset2))
15895 toffset2 = fold_convert (type, toffset2);
15897 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15898 if (!cst_and_fits_in_hwi (tdiff))
15899 return false;
15901 *diff = int_cst_value (tdiff);
15903 else if (toffset1 || toffset2)
15905 /* If only one of the offsets is non-constant, the difference cannot
15906 be a constant. */
15907 return false;
15909 else
15910 *diff = 0;
15912 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15913 return true;
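   /* Illustrative example (annotation, not part of the original source):
      assuming 4-byte int and int A[10], E1 = &A[3] and E2 = &A[1] share
      the core &A with bit positions 96 and 32, so *DIFF is set to
      (96 - 32) / 8 = 8 and true is returned; &A[3] versus &B[0] fails
      the operand_equal_p check on the cores.  */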
15916 /* Simplify the floating point expression EXP when the sign of the
15917 result is not significant. Return NULL_TREE if no simplification
15918 is possible. */
15920 tree
15921 fold_strip_sign_ops (tree exp)
15923 tree arg0, arg1;
15925 switch (TREE_CODE (exp))
15927 case ABS_EXPR:
15928 case NEGATE_EXPR:
15929 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15930 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15932 case MULT_EXPR:
15933 case RDIV_EXPR:
15934 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15935 return NULL_TREE;
15936 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15937 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15938 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15939 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15940 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15941 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15942 break;
15944 case COMPOUND_EXPR:
15945 arg0 = TREE_OPERAND (exp, 0);
15946 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15947 if (arg1)
15948 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15949 break;
15951 case COND_EXPR:
15952 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15953 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15954 if (arg0 || arg1)
15955 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15956 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15957 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15958 break;
15960 case CALL_EXPR:
15962 const enum built_in_function fcode = builtin_mathfn_code (exp);
15963 switch (fcode)
15965 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15966 /* Strip copysign function call, return the 1st argument. */
15967 arg0 = CALL_EXPR_ARG (exp, 0);
15968 arg1 = CALL_EXPR_ARG (exp, 1);
15969 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15971 default:
15972 /* Strip sign ops from the argument of "odd" math functions. */
15973 if (negate_mathfn_p (fcode))
15975 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15976 if (arg0)
15977 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15979 break;
15982 break;
15984 default:
15985 break;
15987 return NULL_TREE;
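   /* Illustrative example (annotation, not part of the original source):
      when only the magnitude of the result matters, this routine
      rewrites -x * y to x * y, and, since sin is odd (negate_mathfn_p),
      sin (-x) to sin (x); it returns NULL_TREE when no sign operation
      can be stripped.  */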