/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
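/* Comment added for clarity: the encoding is a bitmask over the four
   primitive outcomes LT (1), EQ (2), GT (4) and UNORD (8).  For example,
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT), so
   combining two comparisons with AND or OR reduces to a bitwise AND or
   OR of their codes.  */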
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
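/* Worked example, added for clarity, using 8-bit values for brevity:
   a = 0x60, b = 0x60 gives sum = 0xC0.  Here ~(a ^ b) = 0xFF and
   (a ^ sum) = 0xA0, so the AND has its sign bit set and the macro
   yields nonzero: two positive operands produced a negative sum, i.e.
   the addition overflowed.  If a and b differ in sign, ~(a ^ b) has a
   clear sign bit and the macro yields zero, since such an addition can
   never overflow.  */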
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
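/* For instance, with HOST_BITS_PER_WIDE_INT == 32 we have
   BASE == 0x10000, and for x == 0x12345678:
   LOWPART (x) == 0x5678, HIGHPART (x) == 0x1234, and
   x == LOWPART (x) + HIGHPART (x) * BASE.  */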
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
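/* encode and decode are exact inverses.  Continuing the 32-bit
   illustration above, encode (words, 0x89ABCDEF, 0x01234567) yields
   words == {0xCDEF, 0x89AB, 0x4567, 0x0123}, and decode on that array
   recovers low == 0x89ABCDEF and hi == 0x01234567.  */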
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
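/* Example (added for illustration; assumes a 64-bit HOST_WIDE_INT):
   fitting L1 = 0xFFFF, H1 = 0 to a 16-bit signed type clears nothing
   below the precision and then sign extends from bit 15, producing
   *LV == *HV == -1, the bit pattern of the 16-bit value -1.  Since the
   result differs from the argument, the function returns nonzero:
   0xFFFF does not fit in that type.  */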
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates which kinds of overflow we are interested in:
   when >0 we are only interested in signed overflow, for <0 we are
   interested in any overflow.  OVERFLOWED indicates whether overflow
   has already occurred.  We force the value to be within range of TYPE
   (by setting to 0 or 1 all the bits outside the type's range).  We
   set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
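/* Note, added for clarity: the term (l < l1) above is the carry out of
   the low word.  Unsigned addition wraps, so the low sum is smaller
   than an addend exactly when a carry occurred; e.g. with 32-bit
   words, l1 = 0xFFFFFFFF and l2 = 1 give l = 0 < l1, carrying 1 into
   the high word.  */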
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
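/* Note, added for clarity: the final else branch above splits the
   complementary shift as l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1
   rather than using l1 >> (HOST_BITS_PER_WIDE_INT - count).  When COUNT
   is 0 the latter would shift by the full word width, which C leaves
   undefined; the split keeps every shift count strictly below the word
   size.  rshift_double below uses the same idiom.  */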
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
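/* To illustrate the rounding modes (example added for clarity),
   dividing 7 and -7 by 2 yields:

     TRUNC_DIV_EXPR:   7 / 2 -> quo  3, rem  1;  -7 / 2 -> quo -3, rem -1
     FLOOR_DIV_EXPR:   7 / 2 -> quo  3, rem  1;  -7 / 2 -> quo -4, rem  1
     CEIL_DIV_EXPR:    7 / 2 -> quo  4, rem -1;  -7 / 2 -> quo -3, rem -1
     ROUND_DIV_EXPR:   7 / 2 -> quo  4, rem -1;  -7 / 2 -> quo -4, rem  1

   ROUND_DIV_EXPR rounds halfway cases away from zero, since the
   adjustment triggers when 2 * |rem| >= |den|.  */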
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
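/* A typical caller brackets a folding attempt with the functions
   above, e.g. (an illustrative sketch, not code from this file):

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   so that a warning about undefined signed overflow is only issued
   when the folded result is actually used.  */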
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
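/* Example, added for clarity: for a 32-bit signed type the only
   rejected constant is 0x80000000 (INT_MIN), whose negation is not
   representable.  Its value survives the masking above and equals
   1 << (prec - 1), so the function returns false; every other signed
   constant may be negated safely.  */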
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
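/* Example, added for clarity: splitting IN = a - 5 with
   CODE == PLUS_EXPR returns a as the variable part, sets *MINUS_LITP
   to 5 (the literal was subtracted), and leaves *CONP and *LITP
   null.  */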
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
1783 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1784 constant. We assume ARG1 and ARG2 have the same data type, or at least
1785 are the same kind of constant and the same machine mode. Return zero if
1786 combining the constants is not allowed in the current operating mode.
1788 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1790 static tree
1791 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1793 /* Sanity check for the recursive cases. */
1794 if (!arg1 || !arg2)
1795 return NULL_TREE;
1797 STRIP_NOPS (arg1);
1798 STRIP_NOPS (arg2);
1800 if (TREE_CODE (arg1) == INTEGER_CST)
1801 return int_const_binop (code, arg1, arg2, notrunc);
1803 if (TREE_CODE (arg1) == REAL_CST)
1805 enum machine_mode mode;
1806 REAL_VALUE_TYPE d1;
1807 REAL_VALUE_TYPE d2;
1808 REAL_VALUE_TYPE value;
1809 REAL_VALUE_TYPE result;
1810 bool inexact;
1811 tree t, type;
1813 /* The following codes are handled by real_arithmetic. */
1814 switch (code)
1816 case PLUS_EXPR:
1817 case MINUS_EXPR:
1818 case MULT_EXPR:
1819 case RDIV_EXPR:
1820 case MIN_EXPR:
1821 case MAX_EXPR:
1822 break;
1824 default:
1825 return NULL_TREE;
1828 d1 = TREE_REAL_CST (arg1);
1829 d2 = TREE_REAL_CST (arg2);
1831 type = TREE_TYPE (arg1);
1832 mode = TYPE_MODE (type);
1834 /* Don't perform operation if we honor signaling NaNs and
1835 either operand is a NaN. */
1836 if (HONOR_SNANS (mode)
1837 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1838 return NULL_TREE;
1840 /* Don't perform operation if it would raise a division
1841 by zero exception. */
1842 if (code == RDIV_EXPR
1843 && REAL_VALUES_EQUAL (d2, dconst0)
1844 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1845 return NULL_TREE;
1847 /* If either operand is a NaN, just return it. Otherwise, set up
1848 for floating-point trap; we return an overflow. */
1849 if (REAL_VALUE_ISNAN (d1))
1850 return arg1;
1851 else if (REAL_VALUE_ISNAN (d2))
1852 return arg2;
1854 inexact = real_arithmetic (&value, code, &d1, &d2);
1855 real_convert (&result, mode, &value);
1857 /* Don't constant fold this floating point operation if
1858 the result has overflowed and flag_trapping_math. */
1859 if (flag_trapping_math
1860 && MODE_HAS_INFINITIES (mode)
1861 && REAL_VALUE_ISINF (result)
1862 && !REAL_VALUE_ISINF (d1)
1863 && !REAL_VALUE_ISINF (d2))
1864 return NULL_TREE;
1866 /* Don't constant fold this floating point operation if the
1867 result may dependent upon the run-time rounding mode and
1868 flag_rounding_math is set, or if GCC's software emulation
1869 is unable to accurately represent the result. */
1870 if ((flag_rounding_math
1871 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1872 && !flag_unsafe_math_optimizations))
1873 && (inexact || !real_identical (&result, &value)))
1874 return NULL_TREE;
1876 t = build_real (type, result);
1878 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1879 return t;
1882 if (TREE_CODE (arg1) == COMPLEX_CST)
1884 tree type = TREE_TYPE (arg1);
1885 tree r1 = TREE_REALPART (arg1);
1886 tree i1 = TREE_IMAGPART (arg1);
1887 tree r2 = TREE_REALPART (arg2);
1888 tree i2 = TREE_IMAGPART (arg2);
1889 tree real, imag;
1891 switch (code)
1893 case PLUS_EXPR:
1894 case MINUS_EXPR:
1895 real = const_binop (code, r1, r2, notrunc);
1896 imag = const_binop (code, i1, i2, notrunc);
1897 break;
1899 case MULT_EXPR:
1900 real = const_binop (MINUS_EXPR,
1901 const_binop (MULT_EXPR, r1, r2, notrunc),
1902 const_binop (MULT_EXPR, i1, i2, notrunc),
1903 notrunc);
1904 imag = const_binop (PLUS_EXPR,
1905 const_binop (MULT_EXPR, r1, i2, notrunc),
1906 const_binop (MULT_EXPR, i1, r2, notrunc),
1907 notrunc);
1908 break;
1910 case RDIV_EXPR:
1912 tree magsquared
1913 = const_binop (PLUS_EXPR,
1914 const_binop (MULT_EXPR, r2, r2, notrunc),
1915 const_binop (MULT_EXPR, i2, i2, notrunc),
1916 notrunc);
1917 tree t1
1918 = const_binop (PLUS_EXPR,
1919 const_binop (MULT_EXPR, r1, r2, notrunc),
1920 const_binop (MULT_EXPR, i1, i2, notrunc),
1921 notrunc);
1922 tree t2
1923 = const_binop (MINUS_EXPR,
1924 const_binop (MULT_EXPR, i1, r2, notrunc),
1925 const_binop (MULT_EXPR, r1, i2, notrunc),
1926 notrunc);
1928 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1929 code = TRUNC_DIV_EXPR;
1931 real = const_binop (code, t1, magsquared, notrunc);
1932 imag = const_binop (code, t2, magsquared, notrunc);
1934 break;
1936 default:
1937 return NULL_TREE;
1940 if (real && imag)
1941 return build_complex (type, real, imag);
1944 return NULL_TREE;
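/* Illustrative note (not part of the folder itself): the COMPLEX_CST
   division above expands the textbook identity

     (r1 + i1*I) / (r2 + i2*I)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*I) / (r2*r2 + i2*i2)

   so T1 is the real numerator, T2 the imaginary numerator, and
   MAGSQUARED the shared denominator.  For example, folding
   (3 + 4i) / (0 + 2i) gives t1 = 8, t2 = -6, magsquared = 4,
   hence the constant 2 - 1.5i.  */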
1947 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1948 indicates which particular sizetype to create. */
1950 tree
1951 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1953 return build_int_cst (sizetype_tab[(int) kind], number);
1956 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1957 is a tree code. The type of the result is taken from the operands.
1958 Both must be equivalent integer types, as per int_binop_types_match_p.
1959 If the operands are constant, so is the result. */
1961 tree
1962 size_binop (enum tree_code code, tree arg0, tree arg1)
1964 tree type = TREE_TYPE (arg0);
1966 if (arg0 == error_mark_node || arg1 == error_mark_node)
1967 return error_mark_node;
1969 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1970 TREE_TYPE (arg1)));
1972 /* Handle the special case of two integer constants faster. */
1973 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1975 /* And some specific cases even faster than that. */
1976 if (code == PLUS_EXPR)
1978 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1979 return arg1;
1980 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1981 return arg0;
1983 else if (code == MINUS_EXPR)
1985 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1986 return arg0;
1988 else if (code == MULT_EXPR)
1990 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1991 return arg1;
1994 /* Handle general case of two integer constants. */
1995 return int_const_binop (code, arg0, arg1, 0);
1998 return fold_build2 (code, type, arg0, arg1);
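/* A minimal usage sketch (hypothetical caller): with two INTEGER_CST
   operands of sizetype the fast paths above apply directly, e.g.

     tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   yields the sizetype constant 12, and a PLUS_EXPR whose first
   operand is size_int (0) simply returns the other constant operand.
   Non-constant operands fall through to fold_build2.  */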
2001 /* Given two values, either both of sizetype or both of bitsizetype,
2002 compute the difference between the two values. Return the value
2003 in signed type corresponding to the type of the operands. */
2005 tree
2006 size_diffop (tree arg0, tree arg1)
2008 tree type = TREE_TYPE (arg0);
2009 tree ctype;
2011 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2012 TREE_TYPE (arg1)));
2014 /* If the type is already signed, just do the simple thing. */
2015 if (!TYPE_UNSIGNED (type))
2016 return size_binop (MINUS_EXPR, arg0, arg1);
2018 if (type == sizetype)
2019 ctype = ssizetype;
2020 else if (type == bitsizetype)
2021 ctype = sbitsizetype;
2022 else
2023 ctype = lang_hooks.types.signed_type (type);
2025 /* If either operand is not a constant, do the conversions to the signed
2026 type and subtract. The hardware will do the right thing with any
2027 overflow in the subtraction. */
2028 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2029 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2030 fold_convert (ctype, arg1));
2032 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2033 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2034 overflow) and negate (which can't either). Special-case a result
2035 of zero while we're here. */
2036 if (tree_int_cst_equal (arg0, arg1))
2037 return build_int_cst (ctype, 0);
2038 else if (tree_int_cst_lt (arg1, arg0))
2039 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2040 else
2041 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2042 fold_convert (ctype, size_binop (MINUS_EXPR,
2043 arg1, arg0)));
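/* Worked example (illustrative): with sizetype operands ARG0 = 4 and
   ARG1 = 10, the type is unsigned and ARG0 < ARG1, so the code
   computes 10 - 4 = 6 in sizetype, converts to ssizetype (which
   cannot overflow), and negates, yielding the ssizetype constant -6
   rather than a huge wrapped-around unsigned value.  */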
2046 /* A subroutine of fold_convert_const handling conversions of an
2047 INTEGER_CST to another integer type. */
2049 static tree
2050 fold_convert_const_int_from_int (tree type, tree arg1)
2052 tree t;
2054 /* Given an integer constant, make new constant with new type,
2055 appropriately sign-extended or truncated. */
2056 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2057 TREE_INT_CST_HIGH (arg1),
2058 /* Don't set the overflow when
2059 converting a pointer */
2060 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2061 (TREE_INT_CST_HIGH (arg1) < 0
2062 && (TYPE_UNSIGNED (type)
2063 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2064 | TREE_OVERFLOW (arg1));
2066 return t;
2069 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2070 to an integer type. */
2072 static tree
2073 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2075 int overflow = 0;
2076 tree t;
2078 /* The following code implements the floating point to integer
2079 conversion rules required by the Java Language Specification,
2080 that IEEE NaNs are mapped to zero and values that overflow
2081 the target precision saturate, i.e. values greater than
2082 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2083 are mapped to INT_MIN. These semantics are allowed by the
2084 C and C++ standards that simply state that the behavior of
2085 FP-to-integer conversion is unspecified upon overflow. */
2087 HOST_WIDE_INT high, low;
2088 REAL_VALUE_TYPE r;
2089 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2091 switch (code)
2093 case FIX_TRUNC_EXPR:
2094 real_trunc (&r, VOIDmode, &x);
2095 break;
2097 default:
2098 gcc_unreachable ();
2101 /* If R is NaN, return zero and show we have an overflow. */
2102 if (REAL_VALUE_ISNAN (r))
2104 overflow = 1;
2105 high = 0;
2106 low = 0;
2109 /* See if R is less than the lower bound or greater than the
2110 upper bound. */
2112 if (! overflow)
2114 tree lt = TYPE_MIN_VALUE (type);
2115 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2116 if (REAL_VALUES_LESS (r, l))
2118 overflow = 1;
2119 high = TREE_INT_CST_HIGH (lt);
2120 low = TREE_INT_CST_LOW (lt);
2124 if (! overflow)
2126 tree ut = TYPE_MAX_VALUE (type);
2127 if (ut)
2129 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2130 if (REAL_VALUES_LESS (u, r))
2132 overflow = 1;
2133 high = TREE_INT_CST_HIGH (ut);
2134 low = TREE_INT_CST_LOW (ut);
2139 if (! overflow)
2140 REAL_VALUE_TO_INT (&low, &high, r);
2142 t = force_fit_type_double (type, low, high, -1,
2143 overflow | TREE_OVERFLOW (arg1));
2144 return t;
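/* Illustrative example (assuming a 32-bit int target): truncating
   the REAL_CST 1.0e30 to int fails the upper bound check, so the
   result is INT_MAX, 2147483647, with TREE_OVERFLOW set; a NaN input
   yields 0 with TREE_OVERFLOW set, matching the Java-style rules
   described above.  */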
2147 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2148 to another floating point type. */
2150 static tree
2151 fold_convert_const_real_from_real (tree type, tree arg1)
2153 REAL_VALUE_TYPE value;
2154 tree t;
2156 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2157 t = build_real (type, value);
2159 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2160 return t;
2163 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2164 type TYPE. If no simplification can be done return NULL_TREE. */
2166 static tree
2167 fold_convert_const (enum tree_code code, tree type, tree arg1)
2169 if (TREE_TYPE (arg1) == type)
2170 return arg1;
2172 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2174 if (TREE_CODE (arg1) == INTEGER_CST)
2175 return fold_convert_const_int_from_int (type, arg1);
2176 else if (TREE_CODE (arg1) == REAL_CST)
2177 return fold_convert_const_int_from_real (code, type, arg1);
2179 else if (TREE_CODE (type) == REAL_TYPE)
2181 if (TREE_CODE (arg1) == INTEGER_CST)
2182 return build_real_from_int_cst (type, arg1);
2183 if (TREE_CODE (arg1) == REAL_CST)
2184 return fold_convert_const_real_from_real (type, arg1);
2186 return NULL_TREE;
2189 /* Construct a vector of zero elements of vector type TYPE. */
2191 static tree
2192 build_zero_vector (tree type)
2194 tree elem, list;
2195 int i, units;
2197 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2198 units = TYPE_VECTOR_SUBPARTS (type);
2200 list = NULL_TREE;
2201 for (i = 0; i < units; i++)
2202 list = tree_cons (NULL_TREE, elem, list);
2203 return build_vector (type, list);
2206 /* Convert expression ARG to type TYPE. Used by the middle-end for
2207 simple conversions in preference to calling the front-end's convert. */
2209 tree
2210 fold_convert (tree type, tree arg)
2212 tree orig = TREE_TYPE (arg);
2213 tree tem;
2215 if (type == orig)
2216 return arg;
2218 if (TREE_CODE (arg) == ERROR_MARK
2219 || TREE_CODE (type) == ERROR_MARK
2220 || TREE_CODE (orig) == ERROR_MARK)
2221 return error_mark_node;
2223 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2224 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2225 TYPE_MAIN_VARIANT (orig)))
2226 return fold_build1 (NOP_EXPR, type, arg);
2228 switch (TREE_CODE (type))
2230 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2231 case POINTER_TYPE: case REFERENCE_TYPE:
2232 case OFFSET_TYPE:
2233 if (TREE_CODE (arg) == INTEGER_CST)
2235 tem = fold_convert_const (NOP_EXPR, type, arg);
2236 if (tem != NULL_TREE)
2237 return tem;
2239 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2240 || TREE_CODE (orig) == OFFSET_TYPE)
2241 return fold_build1 (NOP_EXPR, type, arg);
2242 if (TREE_CODE (orig) == COMPLEX_TYPE)
2244 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2245 return fold_convert (type, tem);
2247 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2248 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2249 return fold_build1 (NOP_EXPR, type, arg);
2251 case REAL_TYPE:
2252 if (TREE_CODE (arg) == INTEGER_CST)
2254 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2255 if (tem != NULL_TREE)
2256 return tem;
2258 else if (TREE_CODE (arg) == REAL_CST)
2260 tem = fold_convert_const (NOP_EXPR, type, arg);
2261 if (tem != NULL_TREE)
2262 return tem;
2265 switch (TREE_CODE (orig))
2267 case INTEGER_TYPE:
2268 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2269 case POINTER_TYPE: case REFERENCE_TYPE:
2270 return fold_build1 (FLOAT_EXPR, type, arg);
2272 case REAL_TYPE:
2273 return fold_build1 (NOP_EXPR, type, arg);
2275 case COMPLEX_TYPE:
2276 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2277 return fold_convert (type, tem);
2279 default:
2280 gcc_unreachable ();
2283 case COMPLEX_TYPE:
2284 switch (TREE_CODE (orig))
2286 case INTEGER_TYPE:
2287 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2288 case POINTER_TYPE: case REFERENCE_TYPE:
2289 case REAL_TYPE:
2290 return build2 (COMPLEX_EXPR, type,
2291 fold_convert (TREE_TYPE (type), arg),
2292 fold_convert (TREE_TYPE (type), integer_zero_node));
2293 case COMPLEX_TYPE:
2295 tree rpart, ipart;
2297 if (TREE_CODE (arg) == COMPLEX_EXPR)
2299 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2300 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2301 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2304 arg = save_expr (arg);
2305 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2306 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2307 rpart = fold_convert (TREE_TYPE (type), rpart);
2308 ipart = fold_convert (TREE_TYPE (type), ipart);
2309 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2312 default:
2313 gcc_unreachable ();
2316 case VECTOR_TYPE:
2317 if (integer_zerop (arg))
2318 return build_zero_vector (type);
2319 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2320 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2321 || TREE_CODE (orig) == VECTOR_TYPE);
2322 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2324 case VOID_TYPE:
2325 tem = fold_ignored_result (arg);
2326 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2327 return tem;
2328 return fold_build1 (NOP_EXPR, type, tem);
2330 default:
2331 gcc_unreachable ();
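/* Conversion sketch (hypothetical caller): converting a COMPLEX_TYPE
   value to a scalar keeps only the real part, so

     tree d = fold_convert (double_type_node, cplx);

   builds REALPART_EXPR <cplx> and converts that to double, exactly
   as the REAL_TYPE/COMPLEX_TYPE arm above does recursively.  */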
2335 /* Return false if expr can be assumed not to be an lvalue, true
2336 otherwise. */
2338 static bool
2339 maybe_lvalue_p (tree x)
2341 /* We only need to wrap lvalue tree codes. */
2342 switch (TREE_CODE (x))
2344 case VAR_DECL:
2345 case PARM_DECL:
2346 case RESULT_DECL:
2347 case LABEL_DECL:
2348 case FUNCTION_DECL:
2349 case SSA_NAME:
2351 case COMPONENT_REF:
2352 case INDIRECT_REF:
2353 case ALIGN_INDIRECT_REF:
2354 case MISALIGNED_INDIRECT_REF:
2355 case ARRAY_REF:
2356 case ARRAY_RANGE_REF:
2357 case BIT_FIELD_REF:
2358 case OBJ_TYPE_REF:
2360 case REALPART_EXPR:
2361 case IMAGPART_EXPR:
2362 case PREINCREMENT_EXPR:
2363 case PREDECREMENT_EXPR:
2364 case SAVE_EXPR:
2365 case TRY_CATCH_EXPR:
2366 case WITH_CLEANUP_EXPR:
2367 case COMPOUND_EXPR:
2368 case MODIFY_EXPR:
2369 case GIMPLE_MODIFY_STMT:
2370 case TARGET_EXPR:
2371 case COND_EXPR:
2372 case BIND_EXPR:
2373 case MIN_EXPR:
2374 case MAX_EXPR:
2375 break;
2377 default:
2378 /* Assume the worst for front-end tree codes. */
2379 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2380 break;
2381 return false;
2384 return true;
2387 /* Return an expr equal to X but certainly not valid as an lvalue. */
2389 tree
2390 non_lvalue (tree x)
2392 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2393 us. */
2394 if (in_gimple_form)
2395 return x;
2397 if (! maybe_lvalue_p (x))
2398 return x;
2399 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2402 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2403 Zero means allow extended lvalues. */
2405 int pedantic_lvalues;
2407 /* When pedantic, return an expr equal to X but certainly not valid as a
2408 pedantic lvalue. Otherwise, return X. */
2410 static tree
2411 pedantic_non_lvalue (tree x)
2413 if (pedantic_lvalues)
2414 return non_lvalue (x);
2415 else
2416 return x;
2419 /* Given a tree comparison code, return the code that is the logical inverse
2420 of the given code. It is not safe to do this for floating-point
2421 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2422 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2424 enum tree_code
2425 invert_tree_comparison (enum tree_code code, bool honor_nans)
2427 if (honor_nans && flag_trapping_math)
2428 return ERROR_MARK;
2430 switch (code)
2432 case EQ_EXPR:
2433 return NE_EXPR;
2434 case NE_EXPR:
2435 return EQ_EXPR;
2436 case GT_EXPR:
2437 return honor_nans ? UNLE_EXPR : LE_EXPR;
2438 case GE_EXPR:
2439 return honor_nans ? UNLT_EXPR : LT_EXPR;
2440 case LT_EXPR:
2441 return honor_nans ? UNGE_EXPR : GE_EXPR;
2442 case LE_EXPR:
2443 return honor_nans ? UNGT_EXPR : GT_EXPR;
2444 case LTGT_EXPR:
2445 return UNEQ_EXPR;
2446 case UNEQ_EXPR:
2447 return LTGT_EXPR;
2448 case UNGT_EXPR:
2449 return LE_EXPR;
2450 case UNGE_EXPR:
2451 return LT_EXPR;
2452 case UNLT_EXPR:
2453 return GE_EXPR;
2454 case UNLE_EXPR:
2455 return GT_EXPR;
2456 case ORDERED_EXPR:
2457 return UNORDERED_EXPR;
2458 case UNORDERED_EXPR:
2459 return ORDERED_EXPR;
2460 default:
2461 gcc_unreachable ();
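/* Example of why HONOR_NANS matters (illustrative): the inverse of
   x < y in the presence of NaNs is x UNGE y, not x >= y, since both
   x < y and x >= y are false on unordered operands.  Under
   flag_trapping_math even the unordered variant is unsafe, as it
   would drop a required invalid-operand trap, hence the early
   ERROR_MARK return above.  */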
2465 /* Similar, but return the comparison that results if the operands are
2466 swapped. This is safe for floating-point. */
2468 enum tree_code
2469 swap_tree_comparison (enum tree_code code)
2471 switch (code)
2473 case EQ_EXPR:
2474 case NE_EXPR:
2475 case ORDERED_EXPR:
2476 case UNORDERED_EXPR:
2477 case LTGT_EXPR:
2478 case UNEQ_EXPR:
2479 return code;
2480 case GT_EXPR:
2481 return LT_EXPR;
2482 case GE_EXPR:
2483 return LE_EXPR;
2484 case LT_EXPR:
2485 return GT_EXPR;
2486 case LE_EXPR:
2487 return GE_EXPR;
2488 case UNGT_EXPR:
2489 return UNLT_EXPR;
2490 case UNGE_EXPR:
2491 return UNLE_EXPR;
2492 case UNLT_EXPR:
2493 return UNGT_EXPR;
2494 case UNLE_EXPR:
2495 return UNGE_EXPR;
2496 default:
2497 gcc_unreachable ();
2502 /* Convert a comparison tree code from an enum tree_code representation
2503 into a compcode bit-based encoding. This function is the inverse of
2504 compcode_to_comparison. */
2506 static enum comparison_code
2507 comparison_to_compcode (enum tree_code code)
2509 switch (code)
2511 case LT_EXPR:
2512 return COMPCODE_LT;
2513 case EQ_EXPR:
2514 return COMPCODE_EQ;
2515 case LE_EXPR:
2516 return COMPCODE_LE;
2517 case GT_EXPR:
2518 return COMPCODE_GT;
2519 case NE_EXPR:
2520 return COMPCODE_NE;
2521 case GE_EXPR:
2522 return COMPCODE_GE;
2523 case ORDERED_EXPR:
2524 return COMPCODE_ORD;
2525 case UNORDERED_EXPR:
2526 return COMPCODE_UNORD;
2527 case UNLT_EXPR:
2528 return COMPCODE_UNLT;
2529 case UNEQ_EXPR:
2530 return COMPCODE_UNEQ;
2531 case UNLE_EXPR:
2532 return COMPCODE_UNLE;
2533 case UNGT_EXPR:
2534 return COMPCODE_UNGT;
2535 case LTGT_EXPR:
2536 return COMPCODE_LTGT;
2537 case UNGE_EXPR:
2538 return COMPCODE_UNGE;
2539 default:
2540 gcc_unreachable ();
2544 /* Convert a compcode bit-based encoding of a comparison operator back
2545 to GCC's enum tree_code representation. This function is the
2546 inverse of comparison_to_compcode. */
2548 static enum tree_code
2549 compcode_to_comparison (enum comparison_code code)
2551 switch (code)
2553 case COMPCODE_LT:
2554 return LT_EXPR;
2555 case COMPCODE_EQ:
2556 return EQ_EXPR;
2557 case COMPCODE_LE:
2558 return LE_EXPR;
2559 case COMPCODE_GT:
2560 return GT_EXPR;
2561 case COMPCODE_NE:
2562 return NE_EXPR;
2563 case COMPCODE_GE:
2564 return GE_EXPR;
2565 case COMPCODE_ORD:
2566 return ORDERED_EXPR;
2567 case COMPCODE_UNORD:
2568 return UNORDERED_EXPR;
2569 case COMPCODE_UNLT:
2570 return UNLT_EXPR;
2571 case COMPCODE_UNEQ:
2572 return UNEQ_EXPR;
2573 case COMPCODE_UNLE:
2574 return UNLE_EXPR;
2575 case COMPCODE_UNGT:
2576 return UNGT_EXPR;
2577 case COMPCODE_LTGT:
2578 return LTGT_EXPR;
2579 case COMPCODE_UNGE:
2580 return UNGE_EXPR;
2581 default:
2582 gcc_unreachable ();
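/* The encoding makes comparisons compose by bitwise arithmetic
   (identities easy to verify against the enum at the top of the
   file):

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE    (1 | 2 == 3)
     COMPCODE_LT | COMPCODE_GT == COMPCODE_LTGT  (1 | 4 == 5)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ    (3 & 6 == 2)

   combine_comparisons below relies on exactly this property.  */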
2586 /* Return a tree for the comparison which is the combination of
2587 doing the AND or OR (depending on CODE) of the two operations LCODE
2588 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2589 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2590 if this makes the transformation invalid. */
2592 tree
2593 combine_comparisons (enum tree_code code, enum tree_code lcode,
2594 enum tree_code rcode, tree truth_type,
2595 tree ll_arg, tree lr_arg)
2597 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2598 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2599 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2600 enum comparison_code compcode;
2602 switch (code)
2604 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2605 compcode = lcompcode & rcompcode;
2606 break;
2608 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2609 compcode = lcompcode | rcompcode;
2610 break;
2612 default:
2613 return NULL_TREE;
2616 if (!honor_nans)
2618 /* Eliminate unordered comparisons, as well as LTGT and ORD
2619 which are not used unless the mode has NaNs. */
2620 compcode &= ~COMPCODE_UNORD;
2621 if (compcode == COMPCODE_LTGT)
2622 compcode = COMPCODE_NE;
2623 else if (compcode == COMPCODE_ORD)
2624 compcode = COMPCODE_TRUE;
2626 else if (flag_trapping_math)
2628 /* Check that the original operation and the optimized ones will trap
2629 under the same condition. */
2630 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2631 && (lcompcode != COMPCODE_EQ)
2632 && (lcompcode != COMPCODE_ORD);
2633 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2634 && (rcompcode != COMPCODE_EQ)
2635 && (rcompcode != COMPCODE_ORD);
2636 bool trap = (compcode & COMPCODE_UNORD) == 0
2637 && (compcode != COMPCODE_EQ)
2638 && (compcode != COMPCODE_ORD);
2640 /* In a short-circuited boolean expression the LHS might be
2641 such that the RHS, if evaluated, will never trap. For
2642 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2643 if neither x nor y is NaN. (This is a mixed blessing: for
2644 example, the expression above will never trap, hence
2645 optimizing it to x < y would be invalid). */
2646 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2647 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2648 rtrap = false;
2650 /* If the comparison was short-circuited, and only the RHS
2651 trapped, we may now generate a spurious trap. */
2652 if (rtrap && !ltrap
2653 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2654 return NULL_TREE;
2656 /* If we changed the conditions that cause a trap, we lose. */
2657 if ((ltrap || rtrap) != trap)
2658 return NULL_TREE;
2661 if (compcode == COMPCODE_TRUE)
2662 return constant_boolean_node (true, truth_type);
2663 else if (compcode == COMPCODE_FALSE)
2664 return constant_boolean_node (false, truth_type);
2665 else
2666 return fold_build2 (compcode_to_comparison (compcode),
2667 truth_type, ll_arg, lr_arg);
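/* Worked example (hypothetical caller, NaNs not honored): for
   (x < y) || (x == y) the call

     combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, x, y);

   ORs COMPCODE_LT with COMPCODE_EQ to get COMPCODE_LE and folds the
   whole disjunction to the single comparison x <= y.  */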
2670 /* Return nonzero if CODE is a tree code that represents a truth value. */
2672 static int
2673 truth_value_p (enum tree_code code)
2675 return (TREE_CODE_CLASS (code) == tcc_comparison
2676 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2677 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2678 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2681 /* Return nonzero if two operands (typically of the same tree node)
2682 are necessarily equal. If either argument has side-effects this
2683 function returns zero. FLAGS modifies behavior as follows:
2685 If OEP_ONLY_CONST is set, only return nonzero for constants.
2686 This function tests whether the operands are indistinguishable;
2687 it does not test whether they are equal using C's == operation.
2688 The distinction is important for IEEE floating point, because
2689 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2690 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2692 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2693 even though it may hold multiple values during a function.
2694 This is because a GCC tree node guarantees that nothing else is
2695 executed between the evaluation of its "operands" (which may often
2696 be evaluated in arbitrary order). Hence if the operands themselves
2697 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2698 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2699 unset means assuming isochronic (or instantaneous) tree equivalence.
2700 Unless comparing arbitrary expression trees, such as from different
2701 statements, this flag can usually be left unset.
2703 If OEP_PURE_SAME is set, then pure functions with identical arguments
2704 are considered the same. It is used when the caller has other ways
2705 to ensure that global memory is unchanged in between. */
2707 int
2708 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2710 /* If either is ERROR_MARK, they aren't equal. */
2711 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2712 return 0;
2714 /* If both types don't have the same signedness, then we can't consider
2715 them equal. We must check this before the STRIP_NOPS calls
2716 because they may change the signedness of the arguments. */
2717 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2718 return 0;
2720 /* If both types don't have the same precision, then it is not safe
2721 to strip NOPs. */
2722 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2723 return 0;
2725 STRIP_NOPS (arg0);
2726 STRIP_NOPS (arg1);
2728 /* In case both args are comparisons but with different comparison
2729 code, try to swap the comparison operands of one arg to produce
2730 a match and compare that variant. */
2731 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2732 && COMPARISON_CLASS_P (arg0)
2733 && COMPARISON_CLASS_P (arg1))
2735 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2737 if (TREE_CODE (arg0) == swap_code)
2738 return operand_equal_p (TREE_OPERAND (arg0, 0),
2739 TREE_OPERAND (arg1, 1), flags)
2740 && operand_equal_p (TREE_OPERAND (arg0, 1),
2741 TREE_OPERAND (arg1, 0), flags);
2744 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2745 /* This is needed for conversions and for COMPONENT_REF.
2746 Might as well play it safe and always test this. */
2747 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2748 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2749 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2750 return 0;
2752 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2753 We don't care about side effects in that case because the SAVE_EXPR
2754 takes care of that for us. In all other cases, two expressions are
2755 equal if they have no side effects. If we have two identical
2756 expressions with side effects that should be treated the same due
2757 to the only side effects being identical SAVE_EXPR's, that will
2758 be detected in the recursive calls below. */
2759 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2760 && (TREE_CODE (arg0) == SAVE_EXPR
2761 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2762 return 1;
2764 /* Next handle constant cases, those for which we can return 1 even
2765 if ONLY_CONST is set. */
2766 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2767 switch (TREE_CODE (arg0))
2769 case INTEGER_CST:
2770 return tree_int_cst_equal (arg0, arg1);
2772 case REAL_CST:
2773 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2774 TREE_REAL_CST (arg1)))
2775 return 1;
2778 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2780 /* If we do not distinguish between signed and unsigned zero,
2781 consider them equal. */
2782 if (real_zerop (arg0) && real_zerop (arg1))
2783 return 1;
2785 return 0;
2787 case VECTOR_CST:
2789 tree v1, v2;
2791 v1 = TREE_VECTOR_CST_ELTS (arg0);
2792 v2 = TREE_VECTOR_CST_ELTS (arg1);
2793 while (v1 && v2)
2795 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2796 flags))
2797 return 0;
2798 v1 = TREE_CHAIN (v1);
2799 v2 = TREE_CHAIN (v2);
2802 return v1 == v2;
2805 case COMPLEX_CST:
2806 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2807 flags)
2808 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2809 flags));
2811 case STRING_CST:
2812 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2813 && ! memcmp (TREE_STRING_POINTER (arg0),
2814 TREE_STRING_POINTER (arg1),
2815 TREE_STRING_LENGTH (arg0)));
2817 case ADDR_EXPR:
2818 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2819 0);
2820 default:
2821 break;
2824 if (flags & OEP_ONLY_CONST)
2825 return 0;
2827 /* Define macros to test an operand from arg0 and arg1 for equality and a
2828 variant that allows null and views null as being different from any
2829 non-null value. In the latter case, if either is null, they both
2830 must be; otherwise, do the normal comparison. */
2831 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2832 TREE_OPERAND (arg1, N), flags)
2834 #define OP_SAME_WITH_NULL(N) \
2835 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2836 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2838 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2840 case tcc_unary:
2841 /* Two conversions are equal only if signedness and modes match. */
2842 switch (TREE_CODE (arg0))
2844 case NOP_EXPR:
2845 case CONVERT_EXPR:
2846 case FIX_TRUNC_EXPR:
2847 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2848 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2849 return 0;
2850 break;
2851 default:
2852 break;
2855 return OP_SAME (0);
2858 case tcc_comparison:
2859 case tcc_binary:
2860 if (OP_SAME (0) && OP_SAME (1))
2861 return 1;
2863 /* For commutative ops, allow the other order. */
2864 return (commutative_tree_code (TREE_CODE (arg0))
2865 && operand_equal_p (TREE_OPERAND (arg0, 0),
2866 TREE_OPERAND (arg1, 1), flags)
2867 && operand_equal_p (TREE_OPERAND (arg0, 1),
2868 TREE_OPERAND (arg1, 0), flags));
2870 case tcc_reference:
2871 /* If either of the pointer (or reference) expressions we are
2872 dereferencing contain a side effect, these cannot be equal. */
2873 if (TREE_SIDE_EFFECTS (arg0)
2874 || TREE_SIDE_EFFECTS (arg1))
2875 return 0;
2877 switch (TREE_CODE (arg0))
2879 case INDIRECT_REF:
2880 case ALIGN_INDIRECT_REF:
2881 case MISALIGNED_INDIRECT_REF:
2882 case REALPART_EXPR:
2883 case IMAGPART_EXPR:
2884 return OP_SAME (0);
2886 case ARRAY_REF:
2887 case ARRAY_RANGE_REF:
2888 /* Operands 2 and 3 may be null. */
2889 return (OP_SAME (0)
2890 && OP_SAME (1)
2891 && OP_SAME_WITH_NULL (2)
2892 && OP_SAME_WITH_NULL (3));
2894 case COMPONENT_REF:
2895 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2896 may be NULL when we're called to compare MEM_EXPRs. */
2897 return OP_SAME_WITH_NULL (0)
2898 && OP_SAME (1)
2899 && OP_SAME_WITH_NULL (2);
2901 case BIT_FIELD_REF:
2902 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2904 default:
2905 return 0;
2908 case tcc_expression:
2909 switch (TREE_CODE (arg0))
2911 case ADDR_EXPR:
2912 case TRUTH_NOT_EXPR:
2913 return OP_SAME (0);
2915 case TRUTH_ANDIF_EXPR:
2916 case TRUTH_ORIF_EXPR:
2917 return OP_SAME (0) && OP_SAME (1);
2919 case TRUTH_AND_EXPR:
2920 case TRUTH_OR_EXPR:
2921 case TRUTH_XOR_EXPR:
2922 if (OP_SAME (0) && OP_SAME (1))
2923 return 1;
2925 /* Otherwise take into account this is a commutative operation. */
2926 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2927 TREE_OPERAND (arg1, 1), flags)
2928 && operand_equal_p (TREE_OPERAND (arg0, 1),
2929 TREE_OPERAND (arg1, 0), flags));
2931 default:
2932 return 0;
2935 case tcc_vl_exp:
2936 switch (TREE_CODE (arg0))
2938 case CALL_EXPR:
2939 /* If the CALL_EXPRs call different functions, then they
2940 clearly cannot be equal. */
2941 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2942 flags))
2943 return 0;
2946 unsigned int cef = call_expr_flags (arg0);
2947 if (flags & OEP_PURE_SAME)
2948 cef &= ECF_CONST | ECF_PURE;
2949 else
2950 cef &= ECF_CONST;
2951 if (!cef)
2952 return 0;
2955 /* Now see if all the arguments are the same. */
2957 call_expr_arg_iterator iter0, iter1;
2958 tree a0, a1;
2959 for (a0 = first_call_expr_arg (arg0, &iter0),
2960 a1 = first_call_expr_arg (arg1, &iter1);
2961 a0 && a1;
2962 a0 = next_call_expr_arg (&iter0),
2963 a1 = next_call_expr_arg (&iter1))
2964 if (! operand_equal_p (a0, a1, flags))
2965 return 0;
2967 /* If we get here and both argument lists are exhausted
2968 then the CALL_EXPRs are equal. */
2969 return ! (a0 || a1);
2971 default:
2972 return 0;
2975 case tcc_declaration:
2976 /* Consider __builtin_sqrt equal to sqrt. */
2977 return (TREE_CODE (arg0) == FUNCTION_DECL
2978 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2979 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2980 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2982 default:
2983 return 0;
2986 #undef OP_SAME
2987 #undef OP_SAME_WITH_NULL
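/* Usage sketch (hypothetical caller): for two side-effect-free
   ARRAY_REF trees a[i + 1] and a[1 + i],

     operand_equal_p (ref0, ref1, 0)

   returns 1: the tcc_reference arm matches the bases with OP_SAME,
   and the commutative tcc_binary arm matches i + 1 against 1 + i by
   retrying with the operands swapped.  */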
2990 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2991 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2993 When in doubt, return 0. */
2995 static int
2996 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2998 int unsignedp1, unsignedpo;
2999 tree primarg0, primarg1, primother;
3000 unsigned int correct_width;
3002 if (operand_equal_p (arg0, arg1, 0))
3003 return 1;
3005 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3006 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3007 return 0;
3009 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3010 and see if the inner values are the same. This removes any
3011 signedness comparison, which doesn't matter here. */
3012 primarg0 = arg0, primarg1 = arg1;
3013 STRIP_NOPS (primarg0);
3014 STRIP_NOPS (primarg1);
3015 if (operand_equal_p (primarg0, primarg1, 0))
3016 return 1;
3018 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3019 actual comparison operand, ARG0.
3021 First throw away any conversions to wider types
3022 already present in the operands. */
3024 primarg1 = get_narrower (arg1, &unsignedp1);
3025 primother = get_narrower (other, &unsignedpo);
3027 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3028 if (unsignedp1 == unsignedpo
3029 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3030 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3032 tree type = TREE_TYPE (arg0);
3034 /* Make sure shorter operand is extended the right way
3035 to match the longer operand. */
3036 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
3037 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3039 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3040 return 1;
3043 return 0;
3046 /* See if ARG is an expression that is either a comparison or is performing
3047 arithmetic on comparisons. The comparisons must only be comparing
3048 two different values, which will be stored in *CVAL1 and *CVAL2; if
3049 they are nonzero it means that some operands have already been found.
3050 No variables may be used anywhere else in the expression except in the
3051 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3052 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3054 If this is true, return 1. Otherwise, return zero. */
3056 static int
3057 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3059 enum tree_code code = TREE_CODE (arg);
3060 enum tree_code_class class = TREE_CODE_CLASS (code);
3062 /* We can handle some of the tcc_expression cases here. */
3063 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3064 class = tcc_unary;
3065 else if (class == tcc_expression
3066 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3067 || code == COMPOUND_EXPR))
3068 class = tcc_binary;
3070 else if (class == tcc_expression && code == SAVE_EXPR
3071 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3073 /* If we've already found a CVAL1 or CVAL2, this expression is
3074 too complex to handle. */
3075 if (*cval1 || *cval2)
3076 return 0;
3078 class = tcc_unary;
3079 *save_p = 1;
3082 switch (class)
3084 case tcc_unary:
3085 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3087 case tcc_binary:
3088 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3089 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3090 cval1, cval2, save_p));
3092 case tcc_constant:
3093 return 1;
3095 case tcc_expression:
3096 if (code == COND_EXPR)
3097 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3098 cval1, cval2, save_p)
3099 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3100 cval1, cval2, save_p)
3101 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3102 cval1, cval2, save_p));
3103 return 0;
3105 case tcc_comparison:
3106 /* First see if we can handle the first operand, then the second. For
3107 the second operand, we know *CVAL1 can't be zero. It must be that
3108 one side of the comparison is each of the values; test for the
3109 case where this isn't true by failing if the two operands
3110 are the same. */
3112 if (operand_equal_p (TREE_OPERAND (arg, 0),
3113 TREE_OPERAND (arg, 1), 0))
3114 return 0;
3116 if (*cval1 == 0)
3117 *cval1 = TREE_OPERAND (arg, 0);
3118 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3119 ;
3120 else if (*cval2 == 0)
3121 *cval2 = TREE_OPERAND (arg, 0);
3122 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3123 ;
3124 else
3125 return 0;
3127 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3128 ;
3129 else if (*cval2 == 0)
3130 *cval2 = TREE_OPERAND (arg, 1);
3131 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3132 ;
3133 else
3134 return 0;
3136 return 1;
3138 default:
3139 return 0;
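/* Illustrative example: for ARG = (x < y) || (x > y) the walk above
   records *CVAL1 = x and *CVAL2 = y at the first comparison and
   merely re-matches them at the second, so the function returns 1;
   (x < y) || (x > z) fails instead, because Z would be a third
   value.  */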
3143 /* ARG is a tree that is known to contain just arithmetic operations and
3144 comparisons. Evaluate the operations in the tree substituting NEW0 for
3145 any occurrence of OLD0 as an operand of a comparison and likewise for
3146 NEW1 and OLD1. */
3148 static tree
3149 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3151 tree type = TREE_TYPE (arg);
3152 enum tree_code code = TREE_CODE (arg);
3153 enum tree_code_class class = TREE_CODE_CLASS (code);
3155 /* We can handle some of the tcc_expression cases here. */
3156 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3157 class = tcc_unary;
3158 else if (class == tcc_expression
3159 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3160 class = tcc_binary;
3162 switch (class)
3164 case tcc_unary:
3165 return fold_build1 (code, type,
3166 eval_subst (TREE_OPERAND (arg, 0),
3167 old0, new0, old1, new1));
3169 case tcc_binary:
3170 return fold_build2 (code, type,
3171 eval_subst (TREE_OPERAND (arg, 0),
3172 old0, new0, old1, new1),
3173 eval_subst (TREE_OPERAND (arg, 1),
3174 old0, new0, old1, new1));
3176 case tcc_expression:
3177 switch (code)
3179 case SAVE_EXPR:
3180 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3182 case COMPOUND_EXPR:
3183 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3185 case COND_EXPR:
3186 return fold_build3 (code, type,
3187 eval_subst (TREE_OPERAND (arg, 0),
3188 old0, new0, old1, new1),
3189 eval_subst (TREE_OPERAND (arg, 1),
3190 old0, new0, old1, new1),
3191 eval_subst (TREE_OPERAND (arg, 2),
3192 old0, new0, old1, new1));
3193 default:
3194 break;
3196 /* Fall through - ??? */
3198 case tcc_comparison:
3200 tree arg0 = TREE_OPERAND (arg, 0);
3201 tree arg1 = TREE_OPERAND (arg, 1);
3203 /* We need to check both for exact equality and tree equality. The
3204 former will be true if the operand has a side-effect. In that
3205 case, we know the operand occurred exactly once. */
3207 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3208 arg0 = new0;
3209 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3210 arg0 = new1;
3212 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3213 arg1 = new0;
3214 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3215 arg1 = new1;
3217 return fold_build2 (code, type, arg0, arg1);
3220 default:
3221 return arg;
3225 /* Return a tree for the case when the result of an expression is RESULT
3226 converted to TYPE and OMITTED was previously an operand of the expression
3227 but is now not needed (e.g., we folded OMITTED * 0).
3229 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3230 the conversion of RESULT to TYPE. */
3232 tree
3233 omit_one_operand (tree type, tree result, tree omitted)
3235 tree t = fold_convert (type, result);
3237 if (TREE_SIDE_EFFECTS (omitted))
3238 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3240 return non_lvalue (t);
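/* Example (illustrative): when folding f () * 0, a caller can use

     omit_one_operand (type, integer_zero_node, the_call);

   to get COMPOUND_EXPR <f (), 0>, preserving the call's side
   effects; with a side-effect-free omitted operand it simply returns
   the converted result wrapped by non_lvalue.  */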
3243 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3245 static tree
3246 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3248 tree t = fold_convert (type, result);
3250 if (TREE_SIDE_EFFECTS (omitted))
3251 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3253 return pedantic_non_lvalue (t);
3256 /* Return a tree for the case when the result of an expression is RESULT
3257 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3258 of the expression but are now not needed.
3260 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3261 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3262 evaluated before OMITTED2. Otherwise, if neither has side effects,
3263 just do the conversion of RESULT to TYPE. */
3265 tree
3266 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3268 tree t = fold_convert (type, result);
3270 if (TREE_SIDE_EFFECTS (omitted2))
3271 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3272 if (TREE_SIDE_EFFECTS (omitted1))
3273 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3275 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3279 /* Return a simplified tree node for the truth-negation of ARG. This
3280 never alters ARG itself. We assume that ARG is an operation that
3281 returns a truth value (0 or 1).
3283 FIXME: one would think we would fold the result, but it causes
3284 problems with the dominator optimizer. */
3286 tree
3287 fold_truth_not_expr (tree arg)
3289 tree type = TREE_TYPE (arg);
3290 enum tree_code code = TREE_CODE (arg);
3292 /* If this is a comparison, we can simply invert it, except for
3293 floating-point non-equality comparisons, in which case we just
3294 enclose a TRUTH_NOT_EXPR around what we have. */
3296 if (TREE_CODE_CLASS (code) == tcc_comparison)
3298 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3299 if (FLOAT_TYPE_P (op_type)
3300 && flag_trapping_math
3301 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3302 && code != NE_EXPR && code != EQ_EXPR)
3303 return NULL_TREE;
3304 else
3306 code = invert_tree_comparison (code,
3307 HONOR_NANS (TYPE_MODE (op_type)));
3308 if (code == ERROR_MARK)
3309 return NULL_TREE;
3310 else
3311 return build2 (code, type,
3312 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3316 switch (code)
3318 case INTEGER_CST:
3319 return constant_boolean_node (integer_zerop (arg), type);
3321 case TRUTH_AND_EXPR:
3322 return build2 (TRUTH_OR_EXPR, type,
3323 invert_truthvalue (TREE_OPERAND (arg, 0)),
3324 invert_truthvalue (TREE_OPERAND (arg, 1)));
3326 case TRUTH_OR_EXPR:
3327 return build2 (TRUTH_AND_EXPR, type,
3328 invert_truthvalue (TREE_OPERAND (arg, 0)),
3329 invert_truthvalue (TREE_OPERAND (arg, 1)));
3331 case TRUTH_XOR_EXPR:
3332 /* Here we can invert either operand. We invert the first operand
3333 unless the second operand is a TRUTH_NOT_EXPR in which case our
3334 result is the XOR of the first operand with the inside of the
3335 negation of the second operand. */
3337 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3338 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3339 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3340 else
3341 return build2 (TRUTH_XOR_EXPR, type,
3342 invert_truthvalue (TREE_OPERAND (arg, 0)),
3343 TREE_OPERAND (arg, 1));
3345 case TRUTH_ANDIF_EXPR:
3346 return build2 (TRUTH_ORIF_EXPR, type,
3347 invert_truthvalue (TREE_OPERAND (arg, 0)),
3348 invert_truthvalue (TREE_OPERAND (arg, 1)));
3350 case TRUTH_ORIF_EXPR:
3351 return build2 (TRUTH_ANDIF_EXPR, type,
3352 invert_truthvalue (TREE_OPERAND (arg, 0)),
3353 invert_truthvalue (TREE_OPERAND (arg, 1)));
3355 case TRUTH_NOT_EXPR:
3356 return TREE_OPERAND (arg, 0);
3358 case COND_EXPR:
3360 tree arg1 = TREE_OPERAND (arg, 1);
3361 tree arg2 = TREE_OPERAND (arg, 2);
3362 /* A COND_EXPR may have a throw as one operand, which
3363 then has void type. Just leave void operands
3364 as they are. */
3365 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3366 VOID_TYPE_P (TREE_TYPE (arg1))
3367 ? arg1 : invert_truthvalue (arg1),
3368 VOID_TYPE_P (TREE_TYPE (arg2))
3369 ? arg2 : invert_truthvalue (arg2));
3372 case COMPOUND_EXPR:
3373 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3374 invert_truthvalue (TREE_OPERAND (arg, 1)));
3376 case NON_LVALUE_EXPR:
3377 return invert_truthvalue (TREE_OPERAND (arg, 0));
3379 case NOP_EXPR:
3380 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3381 return build1 (TRUTH_NOT_EXPR, type, arg);
3383 case CONVERT_EXPR:
3384 case FLOAT_EXPR:
3385 return build1 (TREE_CODE (arg), type,
3386 invert_truthvalue (TREE_OPERAND (arg, 0)));
3388 case BIT_AND_EXPR:
3389 if (!integer_onep (TREE_OPERAND (arg, 1)))
3390 break;
3391 return build2 (EQ_EXPR, type, arg,
3392 build_int_cst (type, 0));
3394 case SAVE_EXPR:
3395 return build1 (TRUTH_NOT_EXPR, type, arg);
3397 case CLEANUP_POINT_EXPR:
3398 return build1 (CLEANUP_POINT_EXPR, type,
3399 invert_truthvalue (TREE_OPERAND (arg, 0)));
3401 default:
3402 break;
3405 return NULL_TREE;
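/* Example of the De Morgan arms above (illustrative): negating
   TRUTH_ANDIF_EXPR <a, b> produces
   TRUTH_ORIF_EXPR <invert (a), invert (b)>, i.e. !(a && b) becomes
   !a || !b; a floating-point x < y under flag_trapping_math instead
   makes this function return NULL_TREE, and the caller must wrap a
   TRUTH_NOT_EXPR itself.  */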
3408 /* Return a simplified tree node for the truth-negation of ARG. This
3409 never alters ARG itself. We assume that ARG is an operation that
3410 returns a truth value (0 or 1).
3412 FIXME: one would think we would fold the result, but it causes
3413 problems with the dominator optimizer. */
3415 tree
3416 invert_truthvalue (tree arg)
3418 tree tem;
3420 if (TREE_CODE (arg) == ERROR_MARK)
3421 return arg;
3423 tem = fold_truth_not_expr (arg);
3424 if (!tem)
3425 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3427 return tem;
3430 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3431 operands are another bit-wise operation with a common input. If so,
3432 distribute the bit operations to save an operation and possibly two if
3433 constants are involved. For example, convert
3434 (A | B) & (A | C) into A | (B & C)
3435 Further simplification will occur if B and C are constants.
3437 If this optimization cannot be done, 0 will be returned. */
3439 static tree
3440 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3442 tree common;
3443 tree left, right;
3445 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3446 || TREE_CODE (arg0) == code
3447 || (TREE_CODE (arg0) != BIT_AND_EXPR
3448 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3449 return 0;
3451 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3453 common = TREE_OPERAND (arg0, 0);
3454 left = TREE_OPERAND (arg0, 1);
3455 right = TREE_OPERAND (arg1, 1);
3457 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3459 common = TREE_OPERAND (arg0, 0);
3460 left = TREE_OPERAND (arg0, 1);
3461 right = TREE_OPERAND (arg1, 0);
3463 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3465 common = TREE_OPERAND (arg0, 1);
3466 left = TREE_OPERAND (arg0, 0);
3467 right = TREE_OPERAND (arg1, 1);
3469 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3471 common = TREE_OPERAND (arg0, 1);
3472 left = TREE_OPERAND (arg0, 0);
3473 right = TREE_OPERAND (arg1, 0);
3475 else
3476 return 0;
3478 return fold_build2 (TREE_CODE (arg0), type, common,
3479 fold_build2 (code, type, left, right));
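/* Worked example (illustrative): for (X | 3) & (X | 5) the common
   operand is X with LEFT = 3 and RIGHT = 5, so the result is
   X | (3 & 5), which folds further to X | 1 -- one bit operation at
   run time instead of three.  */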
3482 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3483 with code CODE. This optimization is unsafe. */
3484 static tree
3485 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3487 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3488 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3490 /* (A / C) +- (B / C) -> (A +- B) / C. */
3491 if (mul0 == mul1
3492 && operand_equal_p (TREE_OPERAND (arg0, 1),
3493 TREE_OPERAND (arg1, 1), 0))
3494 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3495 fold_build2 (code, type,
3496 TREE_OPERAND (arg0, 0),
3497 TREE_OPERAND (arg1, 0)),
3498 TREE_OPERAND (arg0, 1));
3500 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3501 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3502 TREE_OPERAND (arg1, 0), 0)
3503 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3504 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3506 REAL_VALUE_TYPE r0, r1;
3507 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3508 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3509 if (!mul0)
3510 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3511 if (!mul1)
3512 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3513 real_arithmetic (&r0, code, &r0, &r1);
3514 return fold_build2 (MULT_EXPR, type,
3515 TREE_OPERAND (arg0, 0),
3516 build_real (type, r0));
3519 return NULL_TREE;
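/* Worked examples (illustrative; only valid under the unsafe
   flag_unsafe_math_optimizations semantics): a/10.0 + b/10.0 becomes
   (a + b) / 10.0 via the common-divisor case, and a/2.0 + a/4.0
   becomes a * 0.75 via the common-dividend case, 0.75 being
   1/2.0 + 1/4.0 evaluated by real_arithmetic at compile time.  */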
3522 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3523 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3525 static tree
3526 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3527 int unsignedp)
3529 tree result;
3531 if (bitpos == 0)
3533 tree size = TYPE_SIZE (TREE_TYPE (inner));
3534 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3535 || POINTER_TYPE_P (TREE_TYPE (inner)))
3536 && host_integerp (size, 0)
3537 && tree_low_cst (size, 0) == bitsize)
3538 return fold_convert (type, inner);
3541 result = build3 (BIT_FIELD_REF, type, inner,
3542 size_int (bitsize), bitsize_int (bitpos));
3544 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3546 return result;
3549 /* Optimize a bit-field compare.
3551 There are two cases: First is a compare against a constant and the
3552 second is a comparison of two items where the fields are at the same
3553 bit position relative to the start of a chunk (byte, halfword, word)
3554 large enough to contain it. In these cases we can avoid the shift
3555 implicit in bitfield extractions.
3557 For constants, we emit a compare of the shifted constant with the
3558 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3559 compared. For two fields at the same position, we do the ANDs with the
3560 similar mask and compare the result of the ANDs.
3562 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3563 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3564 are the left and right operands of the comparison, respectively.
3566 If the optimization described above can be done, we return the resulting
3567 tree. Otherwise we return zero. */
3569 static tree
3570 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3571 tree lhs, tree rhs)
3573 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3574 tree type = TREE_TYPE (lhs);
3575 tree signed_type, unsigned_type;
3576 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3577 enum machine_mode lmode, rmode, nmode;
3578 int lunsignedp, runsignedp;
3579 int lvolatilep = 0, rvolatilep = 0;
3580 tree linner, rinner = NULL_TREE;
3581 tree mask;
3582 tree offset;
3584 /* Get all the information about the extractions being done. If the bit size
3585 is the same as the size of the underlying object, we aren't doing an
3586 extraction at all and so can do nothing. We also don't want to
3587 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3588 then will no longer be able to replace it. */
3589 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3590 &lunsignedp, &lvolatilep, false);
3591 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3592 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3593 return 0;
3595 if (!const_p)
3597 /* If this is not a constant, we can only do something if bit positions,
3598 sizes, and signedness are the same. */
3599 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3600 &runsignedp, &rvolatilep, false);
3602 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3603 || lunsignedp != runsignedp || offset != 0
3604 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3605 return 0;
3608 /* See if we can find a mode to refer to this field. We should be able to,
3609 but fail if we can't. */
3610 nmode = get_best_mode (lbitsize, lbitpos,
3611 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3612 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3613 TYPE_ALIGN (TREE_TYPE (rinner))),
3614 word_mode, lvolatilep || rvolatilep);
3615 if (nmode == VOIDmode)
3616 return 0;
3618 /* Set signed and unsigned types of the precision of this mode for the
3619 shifts below. */
3620 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3621 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3623 /* Compute the bit position and size for the new reference and our offset
3624 within it. If the new reference is the same size as the original, we
3625 won't optimize anything, so return zero. */
3626 nbitsize = GET_MODE_BITSIZE (nmode);
3627 nbitpos = lbitpos & ~ (nbitsize - 1);
3628 lbitpos -= nbitpos;
3629 if (nbitsize == lbitsize)
3630 return 0;
3632 if (BYTES_BIG_ENDIAN)
3633 lbitpos = nbitsize - lbitsize - lbitpos;
3635 /* Make the mask to be used against the extracted field. */
3636 mask = build_int_cst_type (unsigned_type, -1);
3637 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3638 mask = const_binop (RSHIFT_EXPR, mask,
3639 size_int (nbitsize - lbitsize - lbitpos), 0);
3641 if (! const_p)
3642 /* If not comparing with constant, just rework the comparison
3643 and return. */
3644 return fold_build2 (code, compare_type,
3645 fold_build2 (BIT_AND_EXPR, unsigned_type,
3646 make_bit_field_ref (linner,
3647 unsigned_type,
3648 nbitsize, nbitpos,
3649 1),
3650 mask),
3651 fold_build2 (BIT_AND_EXPR, unsigned_type,
3652 make_bit_field_ref (rinner,
3653 unsigned_type,
3654 nbitsize, nbitpos,
3655 1),
3656 mask));
3658 /* Otherwise, we are handling the constant case. See if the constant is too
3659 big for the field. Warn and return a tree for 0 (false) if so. We do
3660 this not only for its own sake, but to avoid having to test for this
3661 error case below. If we didn't, we might generate wrong code.
3663 For unsigned fields, the constant shifted right by the field length should
3664 be all zero. For signed fields, the high-order bits should agree with
3665 the sign bit. */
3667 if (lunsignedp)
3669 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3670 fold_convert (unsigned_type, rhs),
3671 size_int (lbitsize), 0)))
3673 warning (0, "comparison is always %d due to width of bit-field",
3674 code == NE_EXPR);
3675 return constant_boolean_node (code == NE_EXPR, compare_type);
3678 else
3680 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3681 size_int (lbitsize - 1), 0);
3682 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3684 warning (0, "comparison is always %d due to width of bit-field",
3685 code == NE_EXPR);
3686 return constant_boolean_node (code == NE_EXPR, compare_type);
3690 /* Single-bit compares should always be against zero. */
3691 if (lbitsize == 1 && ! integer_zerop (rhs))
3693 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3694 rhs = build_int_cst (type, 0);
3697 /* Make a new bitfield reference, shift the constant over the
3698 appropriate number of bits and mask it with the computed mask
3699 (in case this was a signed field). If we changed it, make a new one. */
3700 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3701 if (lvolatilep)
3703 TREE_SIDE_EFFECTS (lhs) = 1;
3704 TREE_THIS_VOLATILE (lhs) = 1;
3707 rhs = const_binop (BIT_AND_EXPR,
3708 const_binop (LSHIFT_EXPR,
3709 fold_convert (unsigned_type, rhs),
3710 size_int (lbitpos), 0),
3711 mask, 0);
3713 return build2 (code, compare_type,
3714 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3715 rhs);
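/* Illustrative sketch: for

     struct s { unsigned f : 3; } x;  ...  x.f == 5

   the code above compares (WORD & MASK) == ((5 << lbitpos) & MASK),
   where WORD is a whole nmode-sized load containing F and MASK has
   three one bits at F's position -- avoiding the shift that a plain
   bit-field extraction would need.  */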
3718 /* Subroutine for fold_truthop: decode a field reference.
3720 If EXP is a comparison reference, we return the innermost reference.
3722 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3723 set to the starting bit number.
3725 If the innermost field can be completely contained in a mode-sized
3726 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3728 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3729 otherwise it is not changed.
3731 *PUNSIGNEDP is set to the signedness of the field.
3733 *PMASK is set to the mask used. This is either contained in a
3734 BIT_AND_EXPR or derived from the width of the field.
3736 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3738 Return 0 if this is not a component reference or is one that we can't
3739 do anything with. */
3741 static tree
3742 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3743 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3744 int *punsignedp, int *pvolatilep,
3745 tree *pmask, tree *pand_mask)
3747 tree outer_type = 0;
3748 tree and_mask = 0;
3749 tree mask, inner, offset;
3750 tree unsigned_type;
3751 unsigned int precision;
3753 /* All the optimizations using this function assume integer fields.
3754 There are problems with FP fields since the type_for_size call
3755 below can fail for, e.g., XFmode. */
3756 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3757 return 0;
3759 /* We are interested in the bare arrangement of bits, so strip everything
3760 that doesn't affect the machine mode. However, record the type of the
3761 outermost expression if it may matter below. */
3762 if (TREE_CODE (exp) == NOP_EXPR
3763 || TREE_CODE (exp) == CONVERT_EXPR
3764 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3765 outer_type = TREE_TYPE (exp);
3766 STRIP_NOPS (exp);
3768 if (TREE_CODE (exp) == BIT_AND_EXPR)
3770 and_mask = TREE_OPERAND (exp, 1);
3771 exp = TREE_OPERAND (exp, 0);
3772 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3773 if (TREE_CODE (and_mask) != INTEGER_CST)
3774 return 0;
3777 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3778 punsignedp, pvolatilep, false);
3779 if ((inner == exp && and_mask == 0)
3780 || *pbitsize < 0 || offset != 0
3781 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3782 return 0;
3784 /* If the number of bits in the reference is the same as the bitsize of
3785 the outer type, then the outer type gives the signedness. Otherwise
3786 (in case of a small bitfield) the signedness is unchanged. */
3787 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3788 *punsignedp = TYPE_UNSIGNED (outer_type);
3790 /* Compute the mask to access the bitfield. */
3791 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3792 precision = TYPE_PRECISION (unsigned_type);
3794 mask = build_int_cst_type (unsigned_type, -1);
3796 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3797 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3799 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3800 if (and_mask != 0)
3801 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3802 fold_convert (unsigned_type, and_mask), mask);
3804 *pmask = mask;
3805 *pand_mask = and_mask;
3806 return inner;
3809 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3810 bit positions. */
3812 static int
3813 all_ones_mask_p (tree mask, int size)
3815 tree type = TREE_TYPE (mask);
3816 unsigned int precision = TYPE_PRECISION (type);
3817 tree tmask;
3819 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3821 return
3822 tree_int_cst_equal (mask,
3823 const_binop (RSHIFT_EXPR,
3824 const_binop (LSHIFT_EXPR, tmask,
3825 size_int (precision - size),
3826 0),
3827 size_int (precision - size), 0));
3830 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3831 represents the sign bit of EXP's type. If EXP represents a sign
3832 or zero extension, also test VAL against the unextended type.
3833 The return value is the (sub)expression whose sign bit is VAL,
3834 or NULL_TREE otherwise. */
3836 static tree
3837 sign_bit_p (tree exp, tree val)
3839 unsigned HOST_WIDE_INT mask_lo, lo;
3840 HOST_WIDE_INT mask_hi, hi;
3841 int width;
3842 tree t;
3844 /* Tree EXP must have an integral type. */
3845 t = TREE_TYPE (exp);
3846 if (! INTEGRAL_TYPE_P (t))
3847 return NULL_TREE;
3849 /* Tree VAL must be an integer constant. */
3850 if (TREE_CODE (val) != INTEGER_CST
3851 || TREE_OVERFLOW (val))
3852 return NULL_TREE;
3854 width = TYPE_PRECISION (t);
3855 if (width > HOST_BITS_PER_WIDE_INT)
3857 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3858 lo = 0;
3860 mask_hi = ((unsigned HOST_WIDE_INT) -1
3861 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3862 mask_lo = -1;
3864 else
3866 hi = 0;
3867 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3869 mask_hi = 0;
3870 mask_lo = ((unsigned HOST_WIDE_INT) -1
3871 >> (HOST_BITS_PER_WIDE_INT - width));
3874 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3875 treat VAL as if it were unsigned. */
3876 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3877 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3878 return exp;
3880 /* Handle extension from a narrower type. */
3881 if (TREE_CODE (exp) == NOP_EXPR
3882 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3883 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3885 return NULL_TREE;
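/* Hedged sketch of the same check on host integers (hypothetical helper,
   not used by GCC): VAL is the sign bit of a WIDTH-bit type exactly when,
   with bits beyond WIDTH masked off, it equals 1 << (WIDTH - 1).
   Assumes 0 < width <= 64.  */
static int
example_sign_bit_p (unsigned long long val, int width)
{
  unsigned long long mask
    = width == 64 ? ~0ull : (1ull << width) - 1;
  return (val & mask) == 1ull << (width - 1);
}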
3888 /* Subroutine for fold_truthop: determine if an operand is simple enough
3889 to be evaluated unconditionally. */
3891 static int
3892 simple_operand_p (tree exp)
3894 /* Strip any conversions that don't change the machine mode. */
3895 STRIP_NOPS (exp);
3897 return (CONSTANT_CLASS_P (exp)
3898 || TREE_CODE (exp) == SSA_NAME
3899 || (DECL_P (exp)
3900 && ! TREE_ADDRESSABLE (exp)
3901 && ! TREE_THIS_VOLATILE (exp)
3902 && ! DECL_NONLOCAL (exp)
3903 /* Don't regard global variables as simple. They may be
3904 allocated in ways unknown to the compiler (shared memory,
3905 #pragma weak, etc). */
3906 && ! TREE_PUBLIC (exp)
3907 && ! DECL_EXTERNAL (exp)
3908 /* Loading a static variable is unduly expensive, but global
3909 registers aren't expensive. */
3910 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3913 /* The following functions are subroutines to fold_range_test and allow it to
3914 try to change a logical combination of comparisons into a range test.
3916 For example, both
3917 X == 2 || X == 3 || X == 4 || X == 5
3918 and
3919 X >= 2 && X <= 5
3920 are converted to
3921 (unsigned) (X - 2) <= 3
3923 We describe each set of comparisons as being either inside or outside
3924 a range, using a variable named like IN_P, and then describe the
3925 range with a lower and upper bound. If one of the bounds is omitted,
3926 it represents either the highest or lowest value of the type.
3928 In the comments below, we represent a range by two numbers in brackets
3929 preceded by a "+" to designate being inside that range, or a "-" to
3930 designate being outside that range, so the condition can be inverted by
3931 flipping the prefix. An omitted bound is represented by a "-". For
3932 example, "- [-, 10]" means being outside the range starting at the lowest
3933 possible value and ending at 10, in other words, being greater than 10.
3934 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3935 always false.
3937 We set up things so that the missing bounds are handled in a consistent
3938 manner so neither a missing bound nor "true" and "false" need to be
3939 handled using a special case. */
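/* A minimal sketch of the rewrite described above, on plain C ints
   (illustrative only; the function name is hypothetical).  The two forms
   are equivalent: subtracting the low bound rebases the range at zero, and
   the unsigned comparison then rejects values below 2 because they wrap
   around to very large values.  The sketch assumes x - 2 does not
   overflow, the same assumption the folder records for signed operands
   via *STRICT_OVERFLOW_P.  */
static int
example_range_test (int x)
{
  /* return x == 2 || x == 3 || x == 4 || x == 5;  */
  return (unsigned) (x - 2) <= 3u;
}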
3941 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3942 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3943 and UPPER1_P are nonzero if the respective argument is an upper bound
3944 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3945 must be specified for a comparison. ARG1 will be converted to ARG0's
3946 type if both are specified. */
3948 static tree
3949 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3950 tree arg1, int upper1_p)
3952 tree tem;
3953 int result;
3954 int sgn0, sgn1;
3956 /* If neither arg represents infinity, do the normal operation.
3957 Else, if not a comparison, return infinity. Else handle the special
3958 comparison rules. Note that most of the cases below won't occur, but
3959 are handled for consistency. */
3961 if (arg0 != 0 && arg1 != 0)
3963 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3964 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3965 STRIP_NOPS (tem);
3966 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3969 if (TREE_CODE_CLASS (code) != tcc_comparison)
3970 return 0;
3972 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3973 for neither. In real maths, we cannot assume open ended ranges are
3974 the same. But, this is computer arithmetic, where numbers are finite.
3975 We can therefore stand in for any unbounded bound with a single
3976 value Z, greater than any representable number. This permits
3977 us to treat unbounded ranges as equal. */
3978 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3979 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3980 switch (code)
3982 case EQ_EXPR:
3983 result = sgn0 == sgn1;
3984 break;
3985 case NE_EXPR:
3986 result = sgn0 != sgn1;
3987 break;
3988 case LT_EXPR:
3989 result = sgn0 < sgn1;
3990 break;
3991 case LE_EXPR:
3992 result = sgn0 <= sgn1;
3993 break;
3994 case GT_EXPR:
3995 result = sgn0 > sgn1;
3996 break;
3997 case GE_EXPR:
3998 result = sgn0 >= sgn1;
3999 break;
4000 default:
4001 gcc_unreachable ();
4004 return constant_boolean_node (result, type);
4007 /* Given EXP, a logical expression, set the range it is testing into
4008 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4009 actually being tested. *PLOW and *PHIGH will be made of the same
4010 type as the returned expression. If EXP is not a comparison, we
4011 will most likely not be returning a useful value and range. Set
4012 *STRICT_OVERFLOW_P to true if the return value is only valid
4013 because signed overflow is undefined; otherwise, do not change
4014 *STRICT_OVERFLOW_P. */
4016 static tree
4017 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4018 bool *strict_overflow_p)
4020 enum tree_code code;
4021 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4022 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4023 int in_p, n_in_p;
4024 tree low, high, n_low, n_high;
4026 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4027 and see if we can refine the range. Some of the cases below may not
4028 happen, but it doesn't seem worth worrying about this. We "continue"
4029 the outer loop when we've changed something; otherwise we "break"
4030 the switch, which will "break" the while. */
4032 in_p = 0;
4033 low = high = build_int_cst (TREE_TYPE (exp), 0);
4035 while (1)
4037 code = TREE_CODE (exp);
4038 exp_type = TREE_TYPE (exp);
4040 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4042 if (TREE_OPERAND_LENGTH (exp) > 0)
4043 arg0 = TREE_OPERAND (exp, 0);
4044 if (TREE_CODE_CLASS (code) == tcc_comparison
4045 || TREE_CODE_CLASS (code) == tcc_unary
4046 || TREE_CODE_CLASS (code) == tcc_binary)
4047 arg0_type = TREE_TYPE (arg0);
4048 if (TREE_CODE_CLASS (code) == tcc_binary
4049 || TREE_CODE_CLASS (code) == tcc_comparison
4050 || (TREE_CODE_CLASS (code) == tcc_expression
4051 && TREE_OPERAND_LENGTH (exp) > 1))
4052 arg1 = TREE_OPERAND (exp, 1);
4055 switch (code)
4057 case TRUTH_NOT_EXPR:
4058 in_p = ! in_p, exp = arg0;
4059 continue;
4061 case EQ_EXPR: case NE_EXPR:
4062 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4063 /* We can only do something if the range is testing for zero
4064 and if the second operand is an integer constant. Note that
4065 saying something is "in" the range we make is done by
4066 complementing IN_P, since it is set in the initial case of
4067 being not equal to zero; "out" is leaving it alone. */
4068 if (low == 0 || high == 0
4069 || ! integer_zerop (low) || ! integer_zerop (high)
4070 || TREE_CODE (arg1) != INTEGER_CST)
4071 break;
4073 switch (code)
4075 case NE_EXPR: /* - [c, c] */
4076 low = high = arg1;
4077 break;
4078 case EQ_EXPR: /* + [c, c] */
4079 in_p = ! in_p, low = high = arg1;
4080 break;
4081 case GT_EXPR: /* - [-, c] */
4082 low = 0, high = arg1;
4083 break;
4084 case GE_EXPR: /* + [c, -] */
4085 in_p = ! in_p, low = arg1, high = 0;
4086 break;
4087 case LT_EXPR: /* - [c, -] */
4088 low = arg1, high = 0;
4089 break;
4090 case LE_EXPR: /* + [-, c] */
4091 in_p = ! in_p, low = 0, high = arg1;
4092 break;
4093 default:
4094 gcc_unreachable ();
4097 /* If this is an unsigned comparison, we also know that EXP is
4098 greater than or equal to zero. We base the range tests we make
4099 on that fact, so we record it here so we can parse existing
4100 range tests. We test arg0_type since often the return type
4101 of, e.g. EQ_EXPR, is boolean. */
4102 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4104 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4105 in_p, low, high, 1,
4106 build_int_cst (arg0_type, 0),
4107 NULL_TREE))
4108 break;
4110 in_p = n_in_p, low = n_low, high = n_high;
4112 /* If the high bound is missing, but we have a nonzero low
4113 bound, reverse the range so it goes from zero to the low bound
4114 minus 1. */
4115 if (high == 0 && low && ! integer_zerop (low))
4117 in_p = ! in_p;
4118 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4119 integer_one_node, 0);
4120 low = build_int_cst (arg0_type, 0);
4124 exp = arg0;
4125 continue;
4127 case NEGATE_EXPR:
4128 /* (-x) IN [a,b] -> x in [-b, -a] */
4129 n_low = range_binop (MINUS_EXPR, exp_type,
4130 build_int_cst (exp_type, 0),
4131 0, high, 1);
4132 n_high = range_binop (MINUS_EXPR, exp_type,
4133 build_int_cst (exp_type, 0),
4134 0, low, 0);
4135 low = n_low, high = n_high;
4136 exp = arg0;
4137 continue;
4139 case BIT_NOT_EXPR:
4140 /* ~ X -> -X - 1 */
4141 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4142 build_int_cst (exp_type, 1));
4143 continue;
4145 case PLUS_EXPR: case MINUS_EXPR:
4146 if (TREE_CODE (arg1) != INTEGER_CST)
4147 break;
4149 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4150 move a constant to the other side. */
4151 if (!TYPE_UNSIGNED (arg0_type)
4152 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4153 break;
4155 /* If EXP is signed, any overflow in the computation is undefined,
4156 so we don't worry about it so long as our computations on
4157 the bounds don't overflow. For unsigned, overflow is defined
4158 and this is exactly the right thing. */
4159 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4160 arg0_type, low, 0, arg1, 0);
4161 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4162 arg0_type, high, 1, arg1, 0);
4163 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4164 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4165 break;
4167 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4168 *strict_overflow_p = true;
4170 /* Check for an unsigned range which has wrapped around the maximum
4171 value thus making n_high < n_low, and normalize it. */
4172 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4174 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4175 integer_one_node, 0);
4176 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4177 integer_one_node, 0);
4179 /* If the range is of the form +/- [ x+1, x ], we won't
4180 be able to normalize it. But then, it represents the
4181 whole range or the empty set, so make it
4182 +/- [ -, - ]. */
4183 if (tree_int_cst_equal (n_low, low)
4184 && tree_int_cst_equal (n_high, high))
4185 low = high = 0;
4186 else
4187 in_p = ! in_p;
4189 else
4190 low = n_low, high = n_high;
4192 exp = arg0;
4193 continue;
4195 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4196 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4197 break;
4199 if (! INTEGRAL_TYPE_P (arg0_type)
4200 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4201 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4202 break;
4204 n_low = low, n_high = high;
4206 if (n_low != 0)
4207 n_low = fold_convert (arg0_type, n_low);
4209 if (n_high != 0)
4210 n_high = fold_convert (arg0_type, n_high);
4213 /* If we're converting arg0 from an unsigned type to exp,
4214 a signed type, we will be doing the comparison as unsigned.
4215 The tests above have already verified that LOW and HIGH
4216 are both positive.
4218 So we have to ensure that we will handle large unsigned
4219 values the same way that the current signed bounds treat
4220 negative values. */
4222 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4224 tree high_positive;
4225 tree equiv_type = lang_hooks.types.type_for_mode
4226 (TYPE_MODE (arg0_type), 1);
4228 /* A range without an upper bound is, naturally, unbounded.
4229 Since convert would have cropped a very large value, use
4230 the max value for the destination type. */
4231 high_positive
4232 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4233 : TYPE_MAX_VALUE (arg0_type);
4235 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4236 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4237 fold_convert (arg0_type,
4238 high_positive),
4239 build_int_cst (arg0_type, 1));
4241 /* If the low bound is specified, "and" the range with the
4242 range for which the original unsigned value will be
4243 positive. */
4244 if (low != 0)
4246 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4247 1, n_low, n_high, 1,
4248 fold_convert (arg0_type,
4249 integer_zero_node),
4250 high_positive))
4251 break;
4253 in_p = (n_in_p == in_p);
4255 else
4257 /* Otherwise, "or" the range with the range of the input
4258 that will be interpreted as negative. */
4259 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4260 0, n_low, n_high, 1,
4261 fold_convert (arg0_type,
4262 integer_zero_node),
4263 high_positive))
4264 break;
4266 in_p = (in_p != n_in_p);
4270 exp = arg0;
4271 low = n_low, high = n_high;
4272 continue;
4274 default:
4275 break;
4278 break;
4281 /* If EXP is a constant, we can evaluate whether this is true or false. */
4282 if (TREE_CODE (exp) == INTEGER_CST)
4284 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4285 exp, 0, low, 0))
4286 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4287 exp, 1, high, 1)));
4288 low = high = 0;
4289 exp = 0;
4292 *pin_p = in_p, *plow = low, *phigh = high;
4293 return exp;
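/* Worked example for the loop above (hedged; not actual compiler output).
   For EXP = (x + 1 > 10) with signed x: the GT_EXPR case yields the range
   - [-, 10] on x + 1, and the PLUS_EXPR case then moves the constant
   across the bounds, giving - [-, 9] on x, i.e. x > 9.  Moving the
   constant is only valid because signed overflow is undefined, which is
   why *STRICT_OVERFLOW_P is set.  On host ints:  */
static int
example_make_range (int x)
{
  /* Equivalent whenever x + 1 does not overflow:  */
  return (x + 1 > 10) == (x > 9);   /* always 1 under that assumption */
}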
4296 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4297 type, TYPE, return an expression to test if EXP is in (or out of, depending
4298 on IN_P) the range. Return 0 if the test couldn't be created. */
4300 static tree
4301 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4303 tree etype = TREE_TYPE (exp);
4304 tree value;
4306 #ifdef HAVE_canonicalize_funcptr_for_compare
4307 /* Disable this optimization for function pointer expressions
4308 on targets that require function pointer canonicalization. */
4309 if (HAVE_canonicalize_funcptr_for_compare
4310 && TREE_CODE (etype) == POINTER_TYPE
4311 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4312 return NULL_TREE;
4313 #endif
4315 if (! in_p)
4317 value = build_range_check (type, exp, 1, low, high);
4318 if (value != 0)
4319 return invert_truthvalue (value);
4321 return 0;
4324 if (low == 0 && high == 0)
4325 return build_int_cst (type, 1);
4327 if (low == 0)
4328 return fold_build2 (LE_EXPR, type, exp,
4329 fold_convert (etype, high));
4331 if (high == 0)
4332 return fold_build2 (GE_EXPR, type, exp,
4333 fold_convert (etype, low));
4335 if (operand_equal_p (low, high, 0))
4336 return fold_build2 (EQ_EXPR, type, exp,
4337 fold_convert (etype, low));
4339 if (integer_zerop (low))
4341 if (! TYPE_UNSIGNED (etype))
4343 etype = lang_hooks.types.unsigned_type (etype);
4344 high = fold_convert (etype, high);
4345 exp = fold_convert (etype, exp);
4347 return build_range_check (type, exp, 1, 0, high);
4350 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4351 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4353 unsigned HOST_WIDE_INT lo;
4354 HOST_WIDE_INT hi;
4355 int prec;
4357 prec = TYPE_PRECISION (etype);
4358 if (prec <= HOST_BITS_PER_WIDE_INT)
4360 hi = 0;
4361 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4363 else
4365 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4366 lo = (unsigned HOST_WIDE_INT) -1;
4369 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4371 if (TYPE_UNSIGNED (etype))
4373 etype = lang_hooks.types.signed_type (etype);
4374 exp = fold_convert (etype, exp);
4376 return fold_build2 (GT_EXPR, type, exp,
4377 build_int_cst (etype, 0));
4381 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4382 This requires wrap-around arithmetic for the type of the expression. */
4383 switch (TREE_CODE (etype))
4385 case INTEGER_TYPE:
4386 /* There is no requirement that LOW be within the range of ETYPE
4387 if the latter is a subtype. It must, however, be within the base
4388 type of ETYPE. So be sure we do the subtraction in that type. */
4389 if (TREE_TYPE (etype))
4390 etype = TREE_TYPE (etype);
4391 break;
4393 case ENUMERAL_TYPE:
4394 case BOOLEAN_TYPE:
4395 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4396 TYPE_UNSIGNED (etype));
4397 break;
4399 default:
4400 break;
4403 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4404 if (TREE_CODE (etype) == INTEGER_TYPE
4405 && !TYPE_OVERFLOW_WRAPS (etype))
4407 tree utype, minv, maxv;
4409 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4410 for the type in question, as we rely on this here. */
4411 utype = lang_hooks.types.unsigned_type (etype);
4412 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4413 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4414 integer_one_node, 1);
4415 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4417 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4418 minv, 1, maxv, 1)))
4419 etype = utype;
4420 else
4421 return 0;
4424 high = fold_convert (etype, high);
4425 low = fold_convert (etype, low);
4426 exp = fold_convert (etype, exp);
4428 value = const_binop (MINUS_EXPR, high, low, 0);
4430 if (value != 0 && !TREE_OVERFLOW (value))
4431 return build_range_check (type,
4432 fold_build2 (MINUS_EXPR, etype, exp, low),
4433 1, build_int_cst (etype, 0), value);
4435 return 0;
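/* Sketch of the (c>=1) && (c<=127) special case above on host types
   (hypothetical helper).  When the range is [1, signed-max], the whole
   test collapses to one signed comparison against zero; the cast below
   assumes the usual two's complement conversion for out-of-range
   values.  */
static int
example_signed_char_range (unsigned char c)
{
  /* return c >= 1 && c <= 127;  */
  return (signed char) c > 0;
}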
4438 /* Return the predecessor of VAL in its type, handling the infinite case. */
4440 static tree
4441 range_predecessor (tree val)
4443 tree type = TREE_TYPE (val);
4445 if (INTEGRAL_TYPE_P (type)
4446 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4447 return 0;
4448 else
4449 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4452 /* Return the successor of VAL in its type, handling the infinite case. */
4454 static tree
4455 range_successor (tree val)
4457 tree type = TREE_TYPE (val);
4459 if (INTEGRAL_TYPE_P (type)
4460 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4461 return 0;
4462 else
4463 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4466 /* Given two ranges, see if we can merge them into one. Return 1 if we
4467 can, 0 if we can't. Set the output range into the specified parameters. */
4469 static int
4470 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4471 tree high0, int in1_p, tree low1, tree high1)
4473 int no_overlap;
4474 int subset;
4475 int temp;
4476 tree tem;
4477 int in_p;
4478 tree low, high;
4479 int lowequal = ((low0 == 0 && low1 == 0)
4480 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4481 low0, 0, low1, 0)));
4482 int highequal = ((high0 == 0 && high1 == 0)
4483 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4484 high0, 1, high1, 1)));
4486 /* Make range 0 be the range that starts first, or ends last if they
4487 start at the same value. Swap them if that isn't the case. */
4488 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4489 low0, 0, low1, 0))
4490 || (lowequal
4491 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4492 high1, 1, high0, 1))))
4494 temp = in0_p, in0_p = in1_p, in1_p = temp;
4495 tem = low0, low0 = low1, low1 = tem;
4496 tem = high0, high0 = high1, high1 = tem;
4499 /* Now flag two cases, whether the ranges are disjoint or whether the
4500 second range is totally subsumed in the first. Note that the tests
4501 below are simplified by the ones above. */
4502 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4503 high0, 1, low1, 0));
4504 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4505 high1, 1, high0, 1));
4507 /* We now have four cases, depending on whether we are including or
4508 excluding the two ranges. */
4509 if (in0_p && in1_p)
4511 /* If they don't overlap, the result is false. If the second range
4512 is a subset it is the result. Otherwise, the range is from the start
4513 of the second to the end of the first. */
4514 if (no_overlap)
4515 in_p = 0, low = high = 0;
4516 else if (subset)
4517 in_p = 1, low = low1, high = high1;
4518 else
4519 in_p = 1, low = low1, high = high0;
4522 else if (in0_p && ! in1_p)
4524 /* If they don't overlap, the result is the first range. If they are
4525 equal, the result is false. If the second range is a subset of the
4526 first, and the ranges begin at the same place, we go from just after
4527 the end of the second range to the end of the first. If the second
4528 range is not a subset of the first, or if it is a subset and both
4529 ranges end at the same place, the range starts at the start of the
4530 first range and ends just before the second range.
4531 Otherwise, we can't describe this as a single range. */
4532 if (no_overlap)
4533 in_p = 1, low = low0, high = high0;
4534 else if (lowequal && highequal)
4535 in_p = 0, low = high = 0;
4536 else if (subset && lowequal)
4538 low = range_successor (high1);
4539 high = high0;
4540 in_p = (low != 0);
4542 else if (! subset || highequal)
4544 low = low0;
4545 high = range_predecessor (low1);
4546 in_p = (high != 0);
4548 else
4549 return 0;
4552 else if (! in0_p && in1_p)
4554 /* If they don't overlap, the result is the second range. If the second
4555 is a subset of the first, the result is false. Otherwise,
4556 the range starts just after the first range and ends at the
4557 end of the second. */
4558 if (no_overlap)
4559 in_p = 1, low = low1, high = high1;
4560 else if (subset || highequal)
4561 in_p = 0, low = high = 0;
4562 else
4564 low = range_successor (high0);
4565 high = high1;
4566 in_p = (low != 0);
4570 else
4572 /* The case where we are excluding both ranges. Here the complex case
4573 is if they don't overlap. In that case, the only time we have a
4574 range is if they are adjacent. If the second is a subset of the
4575 first, the result is the first. Otherwise, the range to exclude
4576 starts at the beginning of the first range and ends at the end of the
4577 second. */
4578 if (no_overlap)
4580 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4581 range_successor (high0),
4582 1, low1, 0)))
4583 in_p = 0, low = low0, high = high1;
4584 else
4586 /* Canonicalize - [min, x] into - [-, x]. */
4587 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4588 switch (TREE_CODE (TREE_TYPE (low0)))
4590 case ENUMERAL_TYPE:
4591 if (TYPE_PRECISION (TREE_TYPE (low0))
4592 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4593 break;
4594 /* FALLTHROUGH */
4595 case INTEGER_TYPE:
4596 if (tree_int_cst_equal (low0,
4597 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4598 low0 = 0;
4599 break;
4600 case POINTER_TYPE:
4601 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4602 && integer_zerop (low0))
4603 low0 = 0;
4604 break;
4605 default:
4606 break;
4609 /* Canonicalize - [x, max] into - [x, -]. */
4610 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4611 switch (TREE_CODE (TREE_TYPE (high1)))
4613 case ENUMERAL_TYPE:
4614 if (TYPE_PRECISION (TREE_TYPE (high1))
4615 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4616 break;
4617 /* FALLTHROUGH */
4618 case INTEGER_TYPE:
4619 if (tree_int_cst_equal (high1,
4620 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4621 high1 = 0;
4622 break;
4623 case POINTER_TYPE:
4624 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4625 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4626 high1, 1,
4627 integer_one_node, 1)))
4628 high1 = 0;
4629 break;
4630 default:
4631 break;
4634 /* The ranges might also be adjacent between the maximum and
4635 minimum values of the given type. For
4636 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4637 return + [x + 1, y - 1]. */
4638 if (low0 == 0 && high1 == 0)
4640 low = range_successor (high0);
4641 high = range_predecessor (low1);
4642 if (low == 0 || high == 0)
4643 return 0;
4645 in_p = 1;
4647 else
4648 return 0;
4651 else if (subset)
4652 in_p = 0, low = low0, high = high0;
4653 else
4654 in_p = 0, low = low0, high = high1;
4657 *pin_p = in_p, *plow = low, *phigh = high;
4658 return 1;
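/* Worked example (illustrative; helper name hypothetical): merging
   + [2, 5] and + [4, 9] with in0_p == in1_p == 1.  The ranges overlap and
   neither is a subset of the other, so the result runs from the start of
   the second range to the end of the first: + [4, 5].  On plain ints:  */
static int
example_merge_ranges (int x)
{
  /* (x >= 2 && x <= 5) && (x >= 4 && x <= 9)  merges to:  */
  return x >= 4 && x <= 5;
}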
4662 /* Subroutine of fold, looking inside expressions of the form
4663 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4664 of the COND_EXPR. This function is also used to optimize
4665 A op B ? C : A, by reversing the comparison first.
4667 Return a folded expression whose code is not a COND_EXPR
4668 anymore, or NULL_TREE if no folding opportunity is found. */
4670 static tree
4671 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4673 enum tree_code comp_code = TREE_CODE (arg0);
4674 tree arg00 = TREE_OPERAND (arg0, 0);
4675 tree arg01 = TREE_OPERAND (arg0, 1);
4676 tree arg1_type = TREE_TYPE (arg1);
4677 tree tem;
4679 STRIP_NOPS (arg1);
4680 STRIP_NOPS (arg2);
4682 /* If we have A op 0 ? A : -A, consider applying the following
4683 transformations:
4685 A == 0? A : -A same as -A
4686 A != 0? A : -A same as A
4687 A >= 0? A : -A same as abs (A)
4688 A > 0? A : -A same as abs (A)
4689 A <= 0? A : -A same as -abs (A)
4690 A < 0? A : -A same as -abs (A)
4692 None of these transformations work for modes with signed
4693 zeros. If A is +/-0, the first two transformations will
4694 change the sign of the result (from +0 to -0, or vice
4695 versa). The last four will fix the sign of the result,
4696 even though the original expressions could be positive or
4697 negative, depending on the sign of A.
4699 Note that all these transformations are correct if A is
4700 NaN, since the two alternatives (A and -A) are also NaNs. */
4701 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4702 ? real_zerop (arg01)
4703 : integer_zerop (arg01))
4704 && ((TREE_CODE (arg2) == NEGATE_EXPR
4705 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4706 /* In the case that A is of the form X-Y, '-A' (arg2) may
4707 have already been folded to Y-X, check for that. */
4708 || (TREE_CODE (arg1) == MINUS_EXPR
4709 && TREE_CODE (arg2) == MINUS_EXPR
4710 && operand_equal_p (TREE_OPERAND (arg1, 0),
4711 TREE_OPERAND (arg2, 1), 0)
4712 && operand_equal_p (TREE_OPERAND (arg1, 1),
4713 TREE_OPERAND (arg2, 0), 0))))
4714 switch (comp_code)
4716 case EQ_EXPR:
4717 case UNEQ_EXPR:
4718 tem = fold_convert (arg1_type, arg1);
4719 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4720 case NE_EXPR:
4721 case LTGT_EXPR:
4722 return pedantic_non_lvalue (fold_convert (type, arg1));
4723 case UNGE_EXPR:
4724 case UNGT_EXPR:
4725 if (flag_trapping_math)
4726 break;
4727 /* Fall through. */
4728 case GE_EXPR:
4729 case GT_EXPR:
4730 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4731 arg1 = fold_convert (lang_hooks.types.signed_type
4732 (TREE_TYPE (arg1)), arg1);
4733 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4734 return pedantic_non_lvalue (fold_convert (type, tem));
4735 case UNLE_EXPR:
4736 case UNLT_EXPR:
4737 if (flag_trapping_math)
4738 break;
/* Fall through. */
4739 case LE_EXPR:
4740 case LT_EXPR:
4741 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4742 arg1 = fold_convert (lang_hooks.types.signed_type
4743 (TREE_TYPE (arg1)), arg1);
4744 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4745 return negate_expr (fold_convert (type, tem));
4746 default:
4747 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4748 break;
4751 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4752 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4753 both transformations are correct when A is NaN: A != 0
4754 is then true, and A == 0 is false. */
4756 if (integer_zerop (arg01) && integer_zerop (arg2))
4758 if (comp_code == NE_EXPR)
4759 return pedantic_non_lvalue (fold_convert (type, arg1));
4760 else if (comp_code == EQ_EXPR)
4761 return build_int_cst (type, 0);
4764 /* Try some transformations of A op B ? A : B.
4766 A == B? A : B same as B
4767 A != B? A : B same as A
4768 A >= B? A : B same as max (A, B)
4769 A > B? A : B same as max (B, A)
4770 A <= B? A : B same as min (A, B)
4771 A < B? A : B same as min (B, A)
4773 As above, these transformations don't work in the presence
4774 of signed zeros. For example, if A and B are zeros of
4775 opposite sign, the first two transformations will change
4776 the sign of the result. In the last four, the original
4777 expressions give different results for (A=+0, B=-0) and
4778 (A=-0, B=+0), but the transformed expressions do not.
4780 The first two transformations are correct if either A or B
4781 is a NaN. In the first transformation, the condition will
4782 be false, and B will indeed be chosen. In the case of the
4783 second transformation, the condition A != B will be true,
4784 and A will be chosen.
4786 The conversions to max() and min() are not correct if B is
4787 a number and A is not. The conditions in the original
4788 expressions will be false, so all four give B. The min()
4789 and max() versions would give a NaN instead. */
4790 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4791 /* Avoid these transformations if the COND_EXPR may be used
4792 as an lvalue in the C++ front-end. PR c++/19199. */
4793 && (in_gimple_form
4794 || (strcmp (lang_hooks.name, "GNU C++") != 0
4795 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4796 || ! maybe_lvalue_p (arg1)
4797 || ! maybe_lvalue_p (arg2)))
4799 tree comp_op0 = arg00;
4800 tree comp_op1 = arg01;
4801 tree comp_type = TREE_TYPE (comp_op0);
4803 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4804 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4806 comp_type = type;
4807 comp_op0 = arg1;
4808 comp_op1 = arg2;
4811 switch (comp_code)
4813 case EQ_EXPR:
4814 return pedantic_non_lvalue (fold_convert (type, arg2));
4815 case NE_EXPR:
4816 return pedantic_non_lvalue (fold_convert (type, arg1));
4817 case LE_EXPR:
4818 case LT_EXPR:
4819 case UNLE_EXPR:
4820 case UNLT_EXPR:
4821 /* In C++ a ?: expression can be an lvalue, so put the
4822 operand which will be used if they are equal first
4823 so that we can convert this back to the
4824 corresponding COND_EXPR. */
4825 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4827 comp_op0 = fold_convert (comp_type, comp_op0);
4828 comp_op1 = fold_convert (comp_type, comp_op1);
4829 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4830 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4831 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4832 return pedantic_non_lvalue (fold_convert (type, tem));
4834 break;
4835 case GE_EXPR:
4836 case GT_EXPR:
4837 case UNGE_EXPR:
4838 case UNGT_EXPR:
4839 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4841 comp_op0 = fold_convert (comp_type, comp_op0);
4842 comp_op1 = fold_convert (comp_type, comp_op1);
4843 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4844 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4845 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4846 return pedantic_non_lvalue (fold_convert (type, tem));
4848 break;
4849 case UNEQ_EXPR:
4850 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4851 return pedantic_non_lvalue (fold_convert (type, arg2));
4852 break;
4853 case LTGT_EXPR:
4854 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4855 return pedantic_non_lvalue (fold_convert (type, arg1));
4856 break;
4857 default:
4858 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4859 break;
4863 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4864 we might still be able to simplify this. For example,
4865 if C1 is one less or one more than C2, this might have started
4866 out as a MIN or MAX and been transformed by this function.
4867 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4869 if (INTEGRAL_TYPE_P (type)
4870 && TREE_CODE (arg01) == INTEGER_CST
4871 && TREE_CODE (arg2) == INTEGER_CST)
4872 switch (comp_code)
4874 case EQ_EXPR:
4875 /* We can replace A with C1 in this case. */
4876 arg1 = fold_convert (type, arg01);
4877 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4879 case LT_EXPR:
4880 /* If C1 is C2 + 1, this is min(A, C2). */
4881 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4882 OEP_ONLY_CONST)
4883 && operand_equal_p (arg01,
4884 const_binop (PLUS_EXPR, arg2,
4885 build_int_cst (type, 1), 0),
4886 OEP_ONLY_CONST))
4887 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4888 type, arg1, arg2));
4889 break;
4891 case LE_EXPR:
4892 /* If C1 is C2 - 1, this is min(A, C2). */
4893 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4894 OEP_ONLY_CONST)
4895 && operand_equal_p (arg01,
4896 const_binop (MINUS_EXPR, arg2,
4897 build_int_cst (type, 1), 0),
4898 OEP_ONLY_CONST))
4899 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4900 type, arg1, arg2));
4901 break;
4903 case GT_EXPR:
4904 /* If C1 is C2 - 1, this is max(A, C2). */
4905 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4906 OEP_ONLY_CONST)
4907 && operand_equal_p (arg01,
4908 const_binop (MINUS_EXPR, arg2,
4909 build_int_cst (type, 1), 0),
4910 OEP_ONLY_CONST))
4911 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4912 type, arg1, arg2));
4913 break;
4915 case GE_EXPR:
4916 /* If C1 is C2 + 1, this is max(A, C2). */
4917 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4918 OEP_ONLY_CONST)
4919 && operand_equal_p (arg01,
4920 const_binop (PLUS_EXPR, arg2,
4921 build_int_cst (type, 1), 0),
4922 OEP_ONLY_CONST))
4923 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4924 type, arg1, arg2));
4925 break;
4926 case NE_EXPR:
4927 break;
4928 default:
4929 gcc_unreachable ();
4932 return NULL_TREE;
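/* Hedged sketch of the first table above on ints, where there is no
   signed zero or NaN to worry about (hypothetical helper).  The
   conditional expression below is what the folder rewrites into a single
   ABS_EXPR.  */
static int
example_cond_abs (int a)
{
  return a > 0 ? a : -a;   /* folded to ABS_EXPR <a> */
}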
4937 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4938 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4939 #endif
4941 /* EXP is some logical combination of boolean tests. See if we can
4942 merge it into some range test. Return the new tree if so. */
4944 static tree
4945 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4947 int or_op = (code == TRUTH_ORIF_EXPR
4948 || code == TRUTH_OR_EXPR);
4949 int in0_p, in1_p, in_p;
4950 tree low0, low1, low, high0, high1, high;
4951 bool strict_overflow_p = false;
4952 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4953 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4954 tree tem;
4955 const char * const warnmsg = G_("assuming signed overflow does not occur "
4956 "when simplifying range test");
4958 /* If this is an OR operation, invert both sides; we will invert
4959 again at the end. */
4960 if (or_op)
4961 in0_p = ! in0_p, in1_p = ! in1_p;
4963 /* If both expressions are the same, if we can merge the ranges, and we
4964 can build the range test, return it or its inverse. If one of the
4965 ranges is always true or always false, consider it to be the same
4966 expression as the other. */
4967 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4968 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4969 in1_p, low1, high1)
4970 && 0 != (tem = (build_range_check (type,
4971 lhs != 0 ? lhs
4972 : rhs != 0 ? rhs : integer_zero_node,
4973 in_p, low, high))))
4975 if (strict_overflow_p)
4976 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4977 return or_op ? invert_truthvalue (tem) : tem;
4980 /* On machines where the branch cost is expensive, if this is a
4981 short-circuited branch and the underlying object on both sides
4982 is the same, make a non-short-circuit operation. */
4983 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4984 && lhs != 0 && rhs != 0
4985 && (code == TRUTH_ANDIF_EXPR
4986 || code == TRUTH_ORIF_EXPR)
4987 && operand_equal_p (lhs, rhs, 0))
4989 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4990 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4991 which cases we can't do this. */
4992 if (simple_operand_p (lhs))
4993 return build2 (code == TRUTH_ANDIF_EXPR
4994 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4995 type, op0, op1);
4997 else if (lang_hooks.decls.global_bindings_p () == 0
4998 && ! CONTAINS_PLACEHOLDER_P (lhs))
5000 tree common = save_expr (lhs);
5002 if (0 != (lhs = build_range_check (type, common,
5003 or_op ? ! in0_p : in0_p,
5004 low0, high0))
5005 && (0 != (rhs = build_range_check (type, common,
5006 or_op ? ! in1_p : in1_p,
5007 low1, high1))))
5009 if (strict_overflow_p)
5010 fold_overflow_warning (warnmsg,
5011 WARN_STRICT_OVERFLOW_COMPARISON);
5012 return build2 (code == TRUTH_ANDIF_EXPR
5013 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5014 type, lhs, rhs);
5019 return 0;
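/* Illustrative instance of the merge performed above (the function name
   is hypothetical): the classic digit test is a TRUTH_ANDIF_EXPR of two
   comparisons on the same operand, which make_range, merge_ranges and
   build_range_check together turn into one unsigned comparison.  */
static int
example_is_digit (int ch)
{
  /* return ch >= '0' && ch <= '9';  */
  return (unsigned) (ch - '0') <= 9u;
}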
5022 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5023 bit value. Arrange things so the extra bits will be set to zero if and
5024 only if C is sign-extended to its full width. If MASK is nonzero,
5025 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5027 static tree
5028 unextend (tree c, int p, int unsignedp, tree mask)
5030 tree type = TREE_TYPE (c);
5031 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5032 tree temp;
5034 if (p == modesize || unsignedp)
5035 return c;
5037 /* We work by getting just the sign bit into the low-order bit, then
5038 into the high-order bit, then sign-extend. We then XOR that value
5039 with C. */
5040 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5041 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5043 /* We must use a signed type in order to get an arithmetic right shift.
5044 However, we must also avoid introducing accidental overflows, so that
5045 a subsequent call to integer_zerop will work. Hence we must
5046 do the type conversion here. At this point, the constant is either
5047 zero or one, and the conversion to a signed type can never overflow.
5048 We could get an overflow if this conversion is done anywhere else. */
5049 if (TYPE_UNSIGNED (type))
5050 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
5052 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5053 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5054 if (mask != 0)
5055 temp = const_binop (BIT_AND_EXPR, temp,
5056 fold_convert (TREE_TYPE (c), mask), 0);
5057 /* If necessary, convert the type back to match the type of C. */
5058 if (TYPE_UNSIGNED (type))
5059 temp = fold_convert (type, temp);
5061 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
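/* A related classic formulation of sign extension on host integers
   (hedged sketch; unextend itself works on trees via the shifts above).
   For a P-bit value c, xoring with the sign bit and then subtracting it
   yields the sign-extended value; the final cast assumes two's
   complement conversion.  Assumes 0 < p < 64.  */
static long long
example_sign_extend (unsigned long long c, int p)
{
  unsigned long long sign = 1ull << (p - 1);
  return (long long) ((c ^ sign) - sign);
}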
5064 /* Find ways of folding logical expressions of LHS and RHS:
5065 Try to merge two comparisons to the same innermost item.
5066 Look for range tests like "ch >= '0' && ch <= '9'".
5067 Look for combinations of simple terms on machines with expensive branches
5068 and evaluate the RHS unconditionally.
5070 For example, if we have p->a == 2 && p->b == 4 and we can make an
5071 object large enough to span both A and B, we can do this with a comparison
5072 against the object ANDed with the a mask.
5074 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5075 operations to do this with one comparison.
5077 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5078 function and the one above.
5080 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5081 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5083 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5084 two operands.
5086 We return the simplified tree or 0 if no optimization is possible. */
5088 static tree
5089 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5091 /* If this is the "or" of two comparisons, we can do something if
5092 the comparisons are NE_EXPR. If this is the "and", we can do something
5093 if the comparisons are EQ_EXPR. I.e.,
5094 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5096 WANTED_CODE is this operation code. For single bit fields, we can
5097 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5098 comparison for one-bit fields. */
5100 enum tree_code wanted_code;
5101 enum tree_code lcode, rcode;
5102 tree ll_arg, lr_arg, rl_arg, rr_arg;
5103 tree ll_inner, lr_inner, rl_inner, rr_inner;
5104 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5105 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5106 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5107 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5108 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5109 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5110 enum machine_mode lnmode, rnmode;
5111 tree ll_mask, lr_mask, rl_mask, rr_mask;
5112 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5113 tree l_const, r_const;
5114 tree lntype, rntype, result;
5115 int first_bit, end_bit;
5116 int volatilep;
5117 tree orig_lhs = lhs, orig_rhs = rhs;
5118 enum tree_code orig_code = code;
5120 /* Start by getting the comparison codes. Fail if anything is volatile.
5121 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5122 it were surrounded with a NE_EXPR. */
5124 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5125 return 0;
5127 lcode = TREE_CODE (lhs);
5128 rcode = TREE_CODE (rhs);
5130 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5132 lhs = build2 (NE_EXPR, truth_type, lhs,
5133 build_int_cst (TREE_TYPE (lhs), 0));
5134 lcode = NE_EXPR;
5137 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5139 rhs = build2 (NE_EXPR, truth_type, rhs,
5140 build_int_cst (TREE_TYPE (rhs), 0));
5141 rcode = NE_EXPR;
5144 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5145 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5146 return 0;
5148 ll_arg = TREE_OPERAND (lhs, 0);
5149 lr_arg = TREE_OPERAND (lhs, 1);
5150 rl_arg = TREE_OPERAND (rhs, 0);
5151 rr_arg = TREE_OPERAND (rhs, 1);
5153 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5154 if (simple_operand_p (ll_arg)
5155 && simple_operand_p (lr_arg))
5157 tree result;
5158 if (operand_equal_p (ll_arg, rl_arg, 0)
5159 && operand_equal_p (lr_arg, rr_arg, 0))
5161 result = combine_comparisons (code, lcode, rcode,
5162 truth_type, ll_arg, lr_arg);
5163 if (result)
5164 return result;
5166 else if (operand_equal_p (ll_arg, rr_arg, 0)
5167 && operand_equal_p (lr_arg, rl_arg, 0))
5169 result = combine_comparisons (code, lcode,
5170 swap_tree_comparison (rcode),
5171 truth_type, ll_arg, lr_arg);
5172 if (result)
5173 return result;
5177 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5178 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5180 /* If the RHS can be evaluated unconditionally and its operands are
5181 simple, it wins to evaluate the RHS unconditionally on machines
5182 with expensive branches. In this case, this isn't a comparison
5183 that can be merged. Avoid doing this if the RHS is a floating-point
5184 comparison since those can trap. */
5186 if (BRANCH_COST >= 2
5187 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5188 && simple_operand_p (rl_arg)
5189 && simple_operand_p (rr_arg))
5191 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5192 if (code == TRUTH_OR_EXPR
5193 && lcode == NE_EXPR && integer_zerop (lr_arg)
5194 && rcode == NE_EXPR && integer_zerop (rr_arg)
5195 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5196 return build2 (NE_EXPR, truth_type,
5197 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5198 ll_arg, rl_arg),
5199 build_int_cst (TREE_TYPE (ll_arg), 0));
5201 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5202 if (code == TRUTH_AND_EXPR
5203 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5204 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5205 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5206 return build2 (EQ_EXPR, truth_type,
5207 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5208 ll_arg, rl_arg),
5209 build_int_cst (TREE_TYPE (ll_arg), 0));
5211 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5213 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5214 return build2 (code, truth_type, lhs, rhs);
5215 return NULL_TREE;
5219 /* See if the comparisons can be merged. Then get all the parameters for
5220 each side. */
5222 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5223 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5224 return 0;
5226 volatilep = 0;
5227 ll_inner = decode_field_reference (ll_arg,
5228 &ll_bitsize, &ll_bitpos, &ll_mode,
5229 &ll_unsignedp, &volatilep, &ll_mask,
5230 &ll_and_mask);
5231 lr_inner = decode_field_reference (lr_arg,
5232 &lr_bitsize, &lr_bitpos, &lr_mode,
5233 &lr_unsignedp, &volatilep, &lr_mask,
5234 &lr_and_mask);
5235 rl_inner = decode_field_reference (rl_arg,
5236 &rl_bitsize, &rl_bitpos, &rl_mode,
5237 &rl_unsignedp, &volatilep, &rl_mask,
5238 &rl_and_mask);
5239 rr_inner = decode_field_reference (rr_arg,
5240 &rr_bitsize, &rr_bitpos, &rr_mode,
5241 &rr_unsignedp, &volatilep, &rr_mask,
5242 &rr_and_mask);
5244 /* The inner operation on the lhs of each
5245 comparison must be the same if we are to be able to do anything.
5246 Then see if we have constants. If not, the same must be true for
5247 the rhs's. */
5248 if (volatilep || ll_inner == 0 || rl_inner == 0
5249 || ! operand_equal_p (ll_inner, rl_inner, 0))
5250 return 0;
5252 if (TREE_CODE (lr_arg) == INTEGER_CST
5253 && TREE_CODE (rr_arg) == INTEGER_CST)
5254 l_const = lr_arg, r_const = rr_arg;
5255 else if (lr_inner == 0 || rr_inner == 0
5256 || ! operand_equal_p (lr_inner, rr_inner, 0))
5257 return 0;
5258 else
5259 l_const = r_const = 0;
5261 /* If either comparison code is not correct for our logical operation,
5262 fail. However, we can convert a one-bit comparison against zero into
5263 the opposite comparison against that bit being set in the field. */
5265 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5266 if (lcode != wanted_code)
5268 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5270 /* Make the left operand unsigned, since we are only interested
5271 in the value of one bit. Otherwise we are doing the wrong
5272 thing below. */
5273 ll_unsignedp = 1;
5274 l_const = ll_mask;
5276 else
5277 return 0;
5280 /* This is analogous to the code for l_const above. */
5281 if (rcode != wanted_code)
5283 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5285 rl_unsignedp = 1;
5286 r_const = rl_mask;
5288 else
5289 return 0;
5292 /* See if we can find a mode that contains both fields being compared on
5293 the left. If we can't, fail. Otherwise, update all constants and masks
5294 to be relative to a field of that size. */
5295 first_bit = MIN (ll_bitpos, rl_bitpos);
5296 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5297 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5298 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5299 volatilep);
5300 if (lnmode == VOIDmode)
5301 return 0;
5303 lnbitsize = GET_MODE_BITSIZE (lnmode);
5304 lnbitpos = first_bit & ~ (lnbitsize - 1);
5305 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5306 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5308 if (BYTES_BIG_ENDIAN)
5310 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5311 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5314 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5315 size_int (xll_bitpos), 0);
5316 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5317 size_int (xrl_bitpos), 0);
5319 if (l_const)
5321 l_const = fold_convert (lntype, l_const);
5322 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5323 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5324 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5325 fold_build1 (BIT_NOT_EXPR,
5326 lntype, ll_mask),
5327 0)))
5329 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5331 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5334 if (r_const)
5336 r_const = fold_convert (lntype, r_const);
5337 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5338 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5339 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5340 fold_build1 (BIT_NOT_EXPR,
5341 lntype, rl_mask),
5342 0)))
5344 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5346 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5350 /* If the right sides are not constant, do the same for them. Also,
5351 disallow this optimization if a size or signedness mismatch occurs
5352 between the left and right sides. */
5353 if (l_const == 0)
5355 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5356 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5357 /* Make sure the two fields on the right
5358 correspond to the left without being swapped. */
5359 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5360 return 0;
5362 first_bit = MIN (lr_bitpos, rr_bitpos);
5363 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5364 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5365 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5366 volatilep);
5367 if (rnmode == VOIDmode)
5368 return 0;
5370 rnbitsize = GET_MODE_BITSIZE (rnmode);
5371 rnbitpos = first_bit & ~ (rnbitsize - 1);
5372 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5373 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5375 if (BYTES_BIG_ENDIAN)
5377 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5378 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5381 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5382 size_int (xlr_bitpos), 0);
5383 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5384 size_int (xrr_bitpos), 0);
5386 /* Make a mask that corresponds to both fields being compared.
5387 Do this for both items being compared. If the operands are the
5388 same size and the bits being compared are in the same position
5389 then we can do this by masking both and comparing the masked
5390 results. */
5391 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5392 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5393 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5395 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5396 ll_unsignedp || rl_unsignedp);
5397 if (! all_ones_mask_p (ll_mask, lnbitsize))
5398 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5400 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5401 lr_unsignedp || rr_unsignedp);
5402 if (! all_ones_mask_p (lr_mask, rnbitsize))
5403 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5405 return build2 (wanted_code, truth_type, lhs, rhs);
5408 /* There is still another way we can do something: If both pairs of
5409 fields being compared are adjacent, we may be able to make a wider
5410 field containing them both.
5412 Note that we still must mask the lhs/rhs expressions. Furthermore,
5413 the mask must be shifted to account for the shift done by
5414 make_bit_field_ref. */
5415 if ((ll_bitsize + ll_bitpos == rl_bitpos
5416 && lr_bitsize + lr_bitpos == rr_bitpos)
5417 || (ll_bitpos == rl_bitpos + rl_bitsize
5418 && lr_bitpos == rr_bitpos + rr_bitsize))
5420 tree type;
5422 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5423 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5424 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5425 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5427 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5428 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5429 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5430 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5432 /* Convert to the smaller type before masking out unwanted bits. */
5433 type = lntype;
5434 if (lntype != rntype)
5436 if (lnbitsize > rnbitsize)
5438 lhs = fold_convert (rntype, lhs);
5439 ll_mask = fold_convert (rntype, ll_mask);
5440 type = rntype;
5442 else if (lnbitsize < rnbitsize)
5444 rhs = fold_convert (lntype, rhs);
5445 lr_mask = fold_convert (lntype, lr_mask);
5446 type = lntype;
5450 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5451 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5453 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5454 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5456 return build2 (wanted_code, truth_type, lhs, rhs);
5459 return 0;
5462 /* Handle the case of comparisons with constants. If there is something in
5463 common between the masks, those bits of the constants must be the same.
5464 If not, the condition is always false. Test for this to avoid generating
5465 incorrect code below. */
5466 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5467 if (! integer_zerop (result)
5468 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5469 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5471 if (wanted_code == NE_EXPR)
5473 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5474 return constant_boolean_node (true, truth_type);
5476 else
5478 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5479 return constant_boolean_node (false, truth_type);
5483 /* Construct the expression we will return. First get the component
5484 reference we will make. Unless the mask is all ones the width of
5485 that field, perform the mask operation. Then compare with the
5486 merged constant. */
5487 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5488 ll_unsignedp || rl_unsignedp);
5490 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5491 if (! all_ones_mask_p (ll_mask, lnbitsize))
5492 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5494 return build2 (wanted_code, truth_type, result,
5495 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
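/* Minimal sketch of the "evaluate the RHS unconditionally" rewrite above
   (hypothetical helper): two inequality tests against zero fold into one
   bitwise OR plus a single comparison, trading a branch for
   straight-line code.  */
static int
example_or_merge (int a, int b)
{
  /* return a != 0 || b != 0;  */
  return (a | b) != 0;
}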
5498 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5499 constant. */
5501 static tree
5502 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5504 tree arg0 = op0;
5505 enum tree_code op_code;
5506 tree comp_const = op1;
5507 tree minmax_const;
5508 int consts_equal, consts_lt;
5509 tree inner;
5511 STRIP_SIGN_NOPS (arg0);
5513 op_code = TREE_CODE (arg0);
5514 minmax_const = TREE_OPERAND (arg0, 1);
5515 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5516 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5517 inner = TREE_OPERAND (arg0, 0);
5519 /* If something does not permit us to optimize, return the original tree. */
5520 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5521 || TREE_CODE (comp_const) != INTEGER_CST
5522 || TREE_OVERFLOW (comp_const)
5523 || TREE_CODE (minmax_const) != INTEGER_CST
5524 || TREE_OVERFLOW (minmax_const))
5525 return NULL_TREE;
5527 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5528 and GT_EXPR, doing the rest with recursive calls using logical
5529 simplifications. */
5530 switch (code)
5532 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5534 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5535 type, op0, op1);
5536 if (tem)
5537 return invert_truthvalue (tem);
5538 return NULL_TREE;
5541 case GE_EXPR:
5542 return
5543 fold_build2 (TRUTH_ORIF_EXPR, type,
5544 optimize_minmax_comparison
5545 (EQ_EXPR, type, arg0, comp_const),
5546 optimize_minmax_comparison
5547 (GT_EXPR, type, arg0, comp_const));
5549 case EQ_EXPR:
5550 if (op_code == MAX_EXPR && consts_equal)
5551 /* MAX (X, 0) == 0 -> X <= 0 */
5552 return fold_build2 (LE_EXPR, type, inner, comp_const);
5554 else if (op_code == MAX_EXPR && consts_lt)
5555 /* MAX (X, 0) == 5 -> X == 5 */
5556 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5558 else if (op_code == MAX_EXPR)
5559 /* MAX (X, 0) == -1 -> false */
5560 return omit_one_operand (type, integer_zero_node, inner);
5562 else if (consts_equal)
5563 /* MIN (X, 0) == 0 -> X >= 0 */
5564 return fold_build2 (GE_EXPR, type, inner, comp_const);
5566 else if (consts_lt)
5567 /* MIN (X, 0) == 5 -> false */
5568 return omit_one_operand (type, integer_zero_node, inner);
5570 else
5571 /* MIN (X, 0) == -1 -> X == -1 */
5572 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5574 case GT_EXPR:
5575 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5576 /* MAX (X, 0) > 0 -> X > 0
5577 MAX (X, 0) > 5 -> X > 5 */
5578 return fold_build2 (GT_EXPR, type, inner, comp_const);
5580 else if (op_code == MAX_EXPR)
5581 /* MAX (X, 0) > -1 -> true */
5582 return omit_one_operand (type, integer_one_node, inner);
5584 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5585 /* MIN (X, 0) > 0 -> false
5586 MIN (X, 0) > 5 -> false */
5587 return omit_one_operand (type, integer_zero_node, inner);
5589 else
5590 /* MIN (X, 0) > -1 -> X > -1 */
5591 return fold_build2 (GT_EXPR, type, inner, comp_const);
5593 default:
5594 return NULL_TREE;
5598 /* T is an integer expression that is being multiplied, divided, or taken a
5599 modulus (CODE says which and what kind of divide or modulus) by a
5600 constant C. See if we can eliminate that operation by folding it with
5601 other operations already in T. WIDE_TYPE, if non-null, is a type that
5602 should be used for the computation if wider than our type.
5604 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5605 (X * 2) + (Y * 4). We must, however, be assured that either the original
5606 expression would not overflow or that overflow is undefined for the type
5607 in the language in question.
5609 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5610 the machine has a multiply-accumulate insn or that this is part of an
5611 addressing calculation.
5613 If we return a non-null expression, it is an equivalent form of the
5614 original computation, but need not be in the original type.
5616 We set *STRICT_OVERFLOW_P to true if the return value depends on
5617 signed overflow being undefined. Otherwise we do not change
5618 *STRICT_OVERFLOW_P. */
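/* A rough source-level illustration (hypothetical, and assuming signed
   overflow is treated as undefined, i.e. no -fwrapv):

       (x * 8 + y * 16) / 4   ->   x * 2 + y * 4
       (x + 7) * 4            ->   x * 4 + 28

   Both rewrites are valid only because the original computation is
   assumed not to overflow, which is why *STRICT_OVERFLOW_P may be
   set.  */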
5620 static tree
5621 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5622 bool *strict_overflow_p)
5624 /* To avoid exponential search depth, refuse to allow recursion past
5625 three levels. Beyond that (1) it's highly unlikely that we'll find
5626 something interesting and (2) we've probably processed it before
5627 when we built the inner expression. */
5629 static int depth;
5630 tree ret;
5632 if (depth > 3)
5633 return NULL;
5635 depth++;
5636 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5637 depth--;
5639 return ret;
5642 static tree
5643 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5644 bool *strict_overflow_p)
5646 tree type = TREE_TYPE (t);
5647 enum tree_code tcode = TREE_CODE (t);
5648 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5649 > GET_MODE_SIZE (TYPE_MODE (type)))
5650 ? wide_type : type);
5651 tree t1, t2;
5652 int same_p = tcode == code;
5653 tree op0 = NULL_TREE, op1 = NULL_TREE;
5654 bool sub_strict_overflow_p;
5656 /* Don't deal with constants of zero here; they confuse the code below. */
5657 if (integer_zerop (c))
5658 return NULL_TREE;
5660 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5661 op0 = TREE_OPERAND (t, 0);
5663 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5664 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5666 /* Note that we need not handle conditional operations here since fold
5667 already handles those cases. So just do arithmetic here. */
5668 switch (tcode)
5670 case INTEGER_CST:
5671 /* For a constant, we can always simplify if we are a multiply
5672 or (for divide and modulus) if it is a multiple of our constant. */
5673 if (code == MULT_EXPR
5674 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5675 return const_binop (code, fold_convert (ctype, t),
5676 fold_convert (ctype, c), 0);
5677 break;
5679 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5680 /* If op0 is an expression ... */
5681 if ((COMPARISON_CLASS_P (op0)
5682 || UNARY_CLASS_P (op0)
5683 || BINARY_CLASS_P (op0)
5684 || VL_EXP_CLASS_P (op0)
5685 || EXPRESSION_CLASS_P (op0))
5686 /* ... and is unsigned, and its type is smaller than ctype,
5687 then we cannot pass this conversion through as a widening. */
5688 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5689 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5690 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5691 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5692 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5693 /* ... or this is a truncation (t is narrower than op0),
5694 then we cannot pass through this narrowing. */
5695 || (GET_MODE_SIZE (TYPE_MODE (type))
5696 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5697 /* ... or signedness changes for division or modulus,
5698 then we cannot pass through this conversion. */
5699 || (code != MULT_EXPR
5700 && (TYPE_UNSIGNED (ctype)
5701 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5702 break;
5704 /* Pass the constant down and see if we can make a simplification. If
5705 we can, replace this expression with the inner simplification for
5706 possible later conversion to our or some other type. */
5707 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5708 && TREE_CODE (t2) == INTEGER_CST
5709 && !TREE_OVERFLOW (t2)
5710 && (0 != (t1 = extract_muldiv (op0, t2, code,
5711 code == MULT_EXPR
5712 ? ctype : NULL_TREE,
5713 strict_overflow_p))))
5714 return t1;
5715 break;
5717 case ABS_EXPR:
5718 /* If widening the type changes it from signed to unsigned, then we
5719 must avoid building ABS_EXPR itself as unsigned. */
5720 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5722 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5723 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5724 != 0)
5726 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5727 return fold_convert (ctype, t1);
5729 break;
5731 /* FALLTHROUGH */
5732 case NEGATE_EXPR:
5733 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5734 != 0)
5735 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5736 break;
5738 case MIN_EXPR: case MAX_EXPR:
5739 /* If widening the type changes the signedness, then we can't perform
5740 this optimization as that changes the result. */
5741 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5742 break;
5744 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5745 sub_strict_overflow_p = false;
5746 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5747 &sub_strict_overflow_p)) != 0
5748 && (t2 = extract_muldiv (op1, c, code, wide_type,
5749 &sub_strict_overflow_p)) != 0)
5751 if (tree_int_cst_sgn (c) < 0)
5752 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5753 if (sub_strict_overflow_p)
5754 *strict_overflow_p = true;
5755 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5756 fold_convert (ctype, t2));
5758 break;
5760 case LSHIFT_EXPR: case RSHIFT_EXPR:
5761 /* If the second operand is constant, this is a multiplication
5762 or floor division, by a power of two, so we can treat it that
5763 way unless the multiplier or divisor overflows. Signed
5764 left-shift overflow is implementation-defined rather than
5765 undefined in C90, so do not convert signed left shift into
5766 multiplication. */
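/* For example, with unsigned X, the tree for (X << 3) / 2 is handled
   here as (X * 8) / 2 and can then fold to X * 4; a signed left shift
   is deliberately not converted, for the C90 reason given above.  */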
5767 if (TREE_CODE (op1) == INTEGER_CST
5768 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5769 /* const_binop may not detect overflow correctly,
5770 so check for it explicitly here. */
5771 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5772 && TREE_INT_CST_HIGH (op1) == 0
5773 && 0 != (t1 = fold_convert (ctype,
5774 const_binop (LSHIFT_EXPR,
5775 size_one_node,
5776 op1, 0)))
5777 && !TREE_OVERFLOW (t1))
5778 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5779 ? MULT_EXPR : FLOOR_DIV_EXPR,
5780 ctype, fold_convert (ctype, op0), t1),
5781 c, code, wide_type, strict_overflow_p);
5782 break;
5784 case PLUS_EXPR: case MINUS_EXPR:
5785 /* See if we can eliminate the operation on both sides. If we can, we
5786 can return a new PLUS or MINUS. If we can't, the only remaining
5787 cases where we can do anything are if the second operand is a
5788 constant. */
5789 sub_strict_overflow_p = false;
5790 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5791 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5792 if (t1 != 0 && t2 != 0
5793 && (code == MULT_EXPR
5794 /* If not multiplication, we can only do this if both operands
5795 are divisible by c. */
5796 || (multiple_of_p (ctype, op0, c)
5797 && multiple_of_p (ctype, op1, c))))
5799 if (sub_strict_overflow_p)
5800 *strict_overflow_p = true;
5801 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5802 fold_convert (ctype, t2));
5805 /* If this was a subtraction, negate OP1 and set it to be an addition.
5806 This simplifies the logic below. */
5807 if (tcode == MINUS_EXPR)
5808 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5810 if (TREE_CODE (op1) != INTEGER_CST)
5811 break;
5813 /* If either OP1 or C is negative, this optimization is not safe for
5814 some of the division and remainder types while for others we need
5815 to change the code. */
5816 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5818 if (code == CEIL_DIV_EXPR)
5819 code = FLOOR_DIV_EXPR;
5820 else if (code == FLOOR_DIV_EXPR)
5821 code = CEIL_DIV_EXPR;
5822 else if (code != MULT_EXPR
5823 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5824 break;
5827 /* If it's a multiply or a division/modulus operation of a multiple
5828 of our constant, do the operation and verify it doesn't overflow. */
5829 if (code == MULT_EXPR
5830 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5832 op1 = const_binop (code, fold_convert (ctype, op1),
5833 fold_convert (ctype, c), 0);
5834 /* We allow the constant to overflow with wrapping semantics. */
5835 if (op1 == 0
5836 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5837 break;
5839 else
5840 break;
5842 /* If we have an unsigned type that is not a sizetype, we cannot widen
5843 the operation since it will change the result if the original
5844 computation overflowed. */
5845 if (TYPE_UNSIGNED (ctype)
5846 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5847 && ctype != type)
5848 break;
5850 /* If we were able to eliminate our operation from the first side,
5851 apply our operation to the second side and reform the PLUS. */
5852 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5853 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5855 /* The last case is if we are a multiply. In that case, we can
5856 apply the distributive law to commute the multiply and addition
5857 if the multiplication of the constants doesn't overflow. */
5858 if (code == MULT_EXPR)
5859 return fold_build2 (tcode, ctype,
5860 fold_build2 (code, ctype,
5861 fold_convert (ctype, op0),
5862 fold_convert (ctype, c)),
5863 op1);
5865 break;
5867 case MULT_EXPR:
5868 /* We have a special case here if we are doing something like
5869 (C * 8) % 4 since we know that's zero. */
5870 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5871 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5872 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5873 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5874 return omit_one_operand (type, integer_zero_node, op0);
5876 /* ... fall through ... */
5878 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5879 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5880 /* If we can extract our operation from the LHS, do so and return a
5881 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5882 do something only if the second operand is a constant. */
5883 if (same_p
5884 && (t1 = extract_muldiv (op0, c, code, wide_type,
5885 strict_overflow_p)) != 0)
5886 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5887 fold_convert (ctype, op1));
5888 else if (tcode == MULT_EXPR && code == MULT_EXPR
5889 && (t1 = extract_muldiv (op1, c, code, wide_type,
5890 strict_overflow_p)) != 0)
5891 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5892 fold_convert (ctype, t1));
5893 else if (TREE_CODE (op1) != INTEGER_CST)
5894 return 0;
5896 /* If these are the same operation types, we can associate them
5897 assuming no overflow. */
5898 if (tcode == code
5899 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5900 fold_convert (ctype, c), 0))
5901 && !TREE_OVERFLOW (t1))
5902 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5904 /* If these operations "cancel" each other, we have the main
5905 optimizations of this pass, which occur when either constant is a
5906 multiple of the other, in which case we replace this with an
5907 operation of either CODE or TCODE.
5909 If we have an unsigned type that is not a sizetype, we cannot do
5910 this since it will change the result if the original computation
5911 overflowed. */
5912 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5913 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5914 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5915 || (tcode == MULT_EXPR
5916 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5917 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5919 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5921 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5922 *strict_overflow_p = true;
5923 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5924 fold_convert (ctype,
5925 const_binop (TRUNC_DIV_EXPR,
5926 op1, c, 0)));
5928 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5930 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5931 *strict_overflow_p = true;
5932 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5933 fold_convert (ctype,
5934 const_binop (TRUNC_DIV_EXPR,
5935 c, op1, 0)));
5938 break;
5940 default:
5941 break;
5944 return 0;
5947 /* Return a node which has the indicated constant VALUE (either 0 or
5948 1), and is of the indicated TYPE. */
5950 tree
5951 constant_boolean_node (int value, tree type)
5953 if (type == integer_type_node)
5954 return value ? integer_one_node : integer_zero_node;
5955 else if (type == boolean_type_node)
5956 return value ? boolean_true_node : boolean_false_node;
5957 else
5958 return build_int_cst (type, value);
5962 /* Return true if expr looks like an ARRAY_REF and set base and
5963 offset to the appropriate trees. If there is no offset,
5964 offset is set to NULL_TREE. Base will be canonicalized to
5965 something you can get the element type from using
5966 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5967 in bytes relative to the base. */
5969 static bool
5970 extract_array_ref (tree expr, tree *base, tree *offset)
5972 /* One canonical form is a PLUS_EXPR with the first
5973 argument being an ADDR_EXPR with a possible NOP_EXPR
5974 attached. */
5975 if (TREE_CODE (expr) == PLUS_EXPR)
5977 tree op0 = TREE_OPERAND (expr, 0);
5978 tree inner_base, dummy1;
5979 /* Strip NOP_EXPRs here because the C frontends and/or
5980 folders may present us with (int *)&x.a + 4B. */
5981 STRIP_NOPS (op0);
5982 if (extract_array_ref (op0, &inner_base, &dummy1))
5984 *base = inner_base;
5985 if (dummy1 == NULL_TREE)
5986 *offset = TREE_OPERAND (expr, 1);
5987 else
5988 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5989 dummy1, TREE_OPERAND (expr, 1));
5990 return true;
5993 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5994 which we transform into an ADDR_EXPR with appropriate
5995 offset. For other arguments to the ADDR_EXPR we assume
5996 zero offset and as such do not care about the ADDR_EXPR
5997 type and strip possible nops from it. */
5998 else if (TREE_CODE (expr) == ADDR_EXPR)
6000 tree op0 = TREE_OPERAND (expr, 0);
6001 if (TREE_CODE (op0) == ARRAY_REF)
6003 tree idx = TREE_OPERAND (op0, 1);
6004 *base = TREE_OPERAND (op0, 0);
6005 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6006 array_ref_element_size (op0));
6008 else
6010 /* Handle array-to-pointer decay as &a. */
6011 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6012 *base = TREE_OPERAND (expr, 0);
6013 else
6014 *base = expr;
6015 *offset = NULL_TREE;
6017 return true;
6019 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6020 else if (SSA_VAR_P (expr)
6021 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6023 *base = expr;
6024 *offset = NULL_TREE;
6025 return true;
6028 return false;
6032 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6033 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6034 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6035 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6036 COND is the first argument to CODE; otherwise (as in the example
6037 given here), it is the second argument. TYPE is the type of the
6038 original expression. Return NULL_TREE if no simplification is
6039 possible. */
6041 static tree
6042 fold_binary_op_with_conditional_arg (enum tree_code code,
6043 tree type, tree op0, tree op1,
6044 tree cond, tree arg, int cond_first_p)
6046 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6047 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6048 tree test, true_value, false_value;
6049 tree lhs = NULL_TREE;
6050 tree rhs = NULL_TREE;
6052 /* This transformation is only worthwhile if we don't have to wrap
6053 arg in a SAVE_EXPR, and the operation can be simplified on at least
6054 one of the branches once it's pushed inside the COND_EXPR. */
6055 if (!TREE_CONSTANT (arg))
6056 return NULL_TREE;
6058 if (TREE_CODE (cond) == COND_EXPR)
6060 test = TREE_OPERAND (cond, 0);
6061 true_value = TREE_OPERAND (cond, 1);
6062 false_value = TREE_OPERAND (cond, 2);
6063 /* If this operand throws an exception, then it does not make
6064 sense to try to perform a logical or arithmetic operation
6065 involving it. */
6066 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6067 lhs = true_value;
6068 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6069 rhs = false_value;
6071 else
6073 tree testtype = TREE_TYPE (cond);
6074 test = cond;
6075 true_value = constant_boolean_node (true, testtype);
6076 false_value = constant_boolean_node (false, testtype);
6079 arg = fold_convert (arg_type, arg);
6080 if (lhs == 0)
6082 true_value = fold_convert (cond_type, true_value);
6083 if (cond_first_p)
6084 lhs = fold_build2 (code, type, true_value, arg);
6085 else
6086 lhs = fold_build2 (code, type, arg, true_value);
6088 if (rhs == 0)
6090 false_value = fold_convert (cond_type, false_value);
6091 if (cond_first_p)
6092 rhs = fold_build2 (code, type, false_value, arg);
6093 else
6094 rhs = fold_build2 (code, type, arg, false_value);
6097 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6098 return fold_convert (type, test);
6102 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6104 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6105 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6106 ADDEND is the same as X.
6108 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6109 and finite. The problematic cases are when X is zero, and its mode
6110 has signed zeros. In the case of rounding towards -infinity,
6111 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6112 modes, X + 0 is not the same as X because -0 + 0 is 0. */
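/* Concretely, with signed zeros honored and round-to-nearest in effect:

       -0.0 + 0.0  evaluates to  +0.0,  so  x + 0.0  is not always  x;
        x  - 0.0   evaluates to   x     for every x, including -0.0.

   This is why only the subtraction (NEGATE) form can return true below,
   and then only when sign-dependent rounding need not be honored.  */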
6114 static bool
6115 fold_real_zero_addition_p (tree type, tree addend, int negate)
6117 if (!real_zerop (addend))
6118 return false;
6120 /* Don't allow the fold with -fsignaling-nans. */
6121 if (HONOR_SNANS (TYPE_MODE (type)))
6122 return false;
6124 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6125 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6126 return true;
6128 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6129 if (TREE_CODE (addend) == REAL_CST
6130 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6131 negate = !negate;
6133 /* The mode has signed zeros, and we have to honor their sign.
6134 In this situation, there is only one case we can return true for.
6135 X - 0 is the same as X unless rounding towards -infinity is
6136 supported. */
6137 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6140 /* Subroutine of fold() that checks comparisons of built-in math
6141 functions against real constants.
6143 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6144 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6145 is the type of the result and ARG0 and ARG1 are the operands of the
6146 comparison. ARG1 must be a TREE_REAL_CST.
6148 The function returns the constant folded tree if a simplification
6149 can be made, and NULL_TREE otherwise. */
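/* For example (a hypothetical source-level view):

       sqrt (x) > 2.0   folds to   x > 4.0
       sqrt (x) < 2.0   folds to   x < 4.0              (NaNs ignored)
       sqrt (x) < 2.0   folds to   x >= 0.0 && x < 4.0  (NaNs honored)

   since a negative x makes sqrt return NaN, and a comparison with NaN
   is false.  */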
6151 static tree
6152 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6153 tree type, tree arg0, tree arg1)
6155 REAL_VALUE_TYPE c;
6157 if (BUILTIN_SQRT_P (fcode))
6159 tree arg = CALL_EXPR_ARG (arg0, 0);
6160 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6162 c = TREE_REAL_CST (arg1);
6163 if (REAL_VALUE_NEGATIVE (c))
6165 /* sqrt(x) < y is always false, if y is negative. */
6166 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6167 return omit_one_operand (type, integer_zero_node, arg);
6169 /* sqrt(x) > y is always true, if y is negative and we
6170 don't care about NaNs, i.e. negative values of x. */
6171 if (code == NE_EXPR || !HONOR_NANS (mode))
6172 return omit_one_operand (type, integer_one_node, arg);
6174 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6175 return fold_build2 (GE_EXPR, type, arg,
6176 build_real (TREE_TYPE (arg), dconst0));
6178 else if (code == GT_EXPR || code == GE_EXPR)
6180 REAL_VALUE_TYPE c2;
6182 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6183 real_convert (&c2, mode, &c2);
6185 if (REAL_VALUE_ISINF (c2))
6187 /* sqrt(x) > y is x == +Inf, when y is very large. */
6188 if (HONOR_INFINITIES (mode))
6189 return fold_build2 (EQ_EXPR, type, arg,
6190 build_real (TREE_TYPE (arg), c2));
6192 /* sqrt(x) > y is always false, when y is very large
6193 and we don't care about infinities. */
6194 return omit_one_operand (type, integer_zero_node, arg);
6197 /* sqrt(x) > c is the same as x > c*c. */
6198 return fold_build2 (code, type, arg,
6199 build_real (TREE_TYPE (arg), c2));
6201 else if (code == LT_EXPR || code == LE_EXPR)
6203 REAL_VALUE_TYPE c2;
6205 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6206 real_convert (&c2, mode, &c2);
6208 if (REAL_VALUE_ISINF (c2))
6210 /* sqrt(x) < y is always true, when y is a very large
6211 value and we don't care about NaNs or Infinities. */
6212 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6213 return omit_one_operand (type, integer_one_node, arg);
6215 /* sqrt(x) < y is x != +Inf when y is very large and we
6216 don't care about NaNs. */
6217 if (! HONOR_NANS (mode))
6218 return fold_build2 (NE_EXPR, type, arg,
6219 build_real (TREE_TYPE (arg), c2));
6221 /* sqrt(x) < y is x >= 0 when y is very large and we
6222 don't care about Infinities. */
6223 if (! HONOR_INFINITIES (mode))
6224 return fold_build2 (GE_EXPR, type, arg,
6225 build_real (TREE_TYPE (arg), dconst0));
6227 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6228 if (lang_hooks.decls.global_bindings_p () != 0
6229 || CONTAINS_PLACEHOLDER_P (arg))
6230 return NULL_TREE;
6232 arg = save_expr (arg);
6233 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6234 fold_build2 (GE_EXPR, type, arg,
6235 build_real (TREE_TYPE (arg),
6236 dconst0)),
6237 fold_build2 (NE_EXPR, type, arg,
6238 build_real (TREE_TYPE (arg),
6239 c2)));
6242 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6243 if (! HONOR_NANS (mode))
6244 return fold_build2 (code, type, arg,
6245 build_real (TREE_TYPE (arg), c2));
6247 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6248 if (lang_hooks.decls.global_bindings_p () == 0
6249 && ! CONTAINS_PLACEHOLDER_P (arg))
6251 arg = save_expr (arg);
6252 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6253 fold_build2 (GE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg),
6255 dconst0)),
6256 fold_build2 (code, type, arg,
6257 build_real (TREE_TYPE (arg),
6258 c2)));
6263 return NULL_TREE;
6266 /* Subroutine of fold() that optimizes comparisons against Infinities,
6267 either +Inf or -Inf.
6269 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6270 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6271 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6273 The function returns the constant folded tree if a simplification
6274 can be made, and NULL_TREE otherwise. */
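/* For double arguments, for example:

       x >  +Inf   folds to   0   (when sNaNs need not be honored)
       x <  +Inf   folds to   x <= DBL_MAX
       x >= +Inf   folds to   x >  DBL_MAX

   A comparison against -Inf is first turned into the mirrored
   comparison against +Inf by the swap at the top of the function.  */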
6276 static tree
6277 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6279 enum machine_mode mode;
6280 REAL_VALUE_TYPE max;
6281 tree temp;
6282 bool neg;
6284 mode = TYPE_MODE (TREE_TYPE (arg0));
6286 /* For negative infinity swap the sense of the comparison. */
6287 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6288 if (neg)
6289 code = swap_tree_comparison (code);
6291 switch (code)
6293 case GT_EXPR:
6294 /* x > +Inf is always false, if we ignore sNaNs. */
6295 if (HONOR_SNANS (mode))
6296 return NULL_TREE;
6297 return omit_one_operand (type, integer_zero_node, arg0);
6299 case LE_EXPR:
6300 /* x <= +Inf is always true, if we don't care about NaNs. */
6301 if (! HONOR_NANS (mode))
6302 return omit_one_operand (type, integer_one_node, arg0);
6304 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6305 if (lang_hooks.decls.global_bindings_p () == 0
6306 && ! CONTAINS_PLACEHOLDER_P (arg0))
6308 arg0 = save_expr (arg0);
6309 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6311 break;
6313 case EQ_EXPR:
6314 case GE_EXPR:
6315 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6316 real_maxval (&max, neg, mode);
6317 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6318 arg0, build_real (TREE_TYPE (arg0), max));
6320 case LT_EXPR:
6321 /* x < +Inf is always equal to x <= DBL_MAX. */
6322 real_maxval (&max, neg, mode);
6323 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6324 arg0, build_real (TREE_TYPE (arg0), max));
6326 case NE_EXPR:
6327 /* x != +Inf is always equal to !(x > DBL_MAX). */
6328 real_maxval (&max, neg, mode);
6329 if (! HONOR_NANS (mode))
6330 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6333 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6334 arg0, build_real (TREE_TYPE (arg0), max));
6335 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6337 default:
6338 break;
6341 return NULL_TREE;
6344 /* Subroutine of fold() that optimizes comparisons of a division by
6345 a nonzero integer constant against an integer constant, i.e.
6346 X/C1 op C2.
6348 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6349 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6350 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6352 The function returns the constant folded tree if a simplification
6353 can be made, and NULL_TREE otherwise. */
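/* For example, with signed x and truncating division, x / 4 == 2 holds
   exactly for x in [8, 11], so it becomes the range check
   8 <= x && x <= 11; a negative divisor first reverses the relational
   operator.  */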
6355 static tree
6356 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6358 tree prod, tmp, hi, lo;
6359 tree arg00 = TREE_OPERAND (arg0, 0);
6360 tree arg01 = TREE_OPERAND (arg0, 1);
6361 unsigned HOST_WIDE_INT lpart;
6362 HOST_WIDE_INT hpart;
6363 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6364 bool neg_overflow;
6365 int overflow;
6367 /* We have to do this the hard way to detect unsigned overflow.
6368 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6369 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6370 TREE_INT_CST_HIGH (arg01),
6371 TREE_INT_CST_LOW (arg1),
6372 TREE_INT_CST_HIGH (arg1),
6373 &lpart, &hpart, unsigned_p);
6374 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6375 -1, overflow);
6376 neg_overflow = false;
6378 if (unsigned_p)
6380 tmp = int_const_binop (MINUS_EXPR, arg01,
6381 build_int_cst (TREE_TYPE (arg01), 1), 0);
6382 lo = prod;
6384 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6385 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6386 TREE_INT_CST_HIGH (prod),
6387 TREE_INT_CST_LOW (tmp),
6388 TREE_INT_CST_HIGH (tmp),
6389 &lpart, &hpart, unsigned_p);
6390 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6391 -1, overflow | TREE_OVERFLOW (prod));
6393 else if (tree_int_cst_sgn (arg01) >= 0)
6395 tmp = int_const_binop (MINUS_EXPR, arg01,
6396 build_int_cst (TREE_TYPE (arg01), 1), 0);
6397 switch (tree_int_cst_sgn (arg1))
6399 case -1:
6400 neg_overflow = true;
6401 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6402 hi = prod;
6403 break;
6405 case 0:
6406 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6407 hi = tmp;
6408 break;
6410 case 1:
6411 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6412 lo = prod;
6413 break;
6415 default:
6416 gcc_unreachable ();
6419 else
6421 /* A negative divisor reverses the relational operators. */
6422 code = swap_tree_comparison (code);
6424 tmp = int_const_binop (PLUS_EXPR, arg01,
6425 build_int_cst (TREE_TYPE (arg01), 1), 0);
6426 switch (tree_int_cst_sgn (arg1))
6428 case -1:
6429 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6430 lo = prod;
6431 break;
6433 case 0:
6434 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6435 lo = tmp;
6436 break;
6438 case 1:
6439 neg_overflow = true;
6440 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6441 hi = prod;
6442 break;
6444 default:
6445 gcc_unreachable ();
6449 switch (code)
6451 case EQ_EXPR:
6452 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6453 return omit_one_operand (type, integer_zero_node, arg00);
6454 if (TREE_OVERFLOW (hi))
6455 return fold_build2 (GE_EXPR, type, arg00, lo);
6456 if (TREE_OVERFLOW (lo))
6457 return fold_build2 (LE_EXPR, type, arg00, hi);
6458 return build_range_check (type, arg00, 1, lo, hi);
6460 case NE_EXPR:
6461 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6462 return omit_one_operand (type, integer_one_node, arg00);
6463 if (TREE_OVERFLOW (hi))
6464 return fold_build2 (LT_EXPR, type, arg00, lo);
6465 if (TREE_OVERFLOW (lo))
6466 return fold_build2 (GT_EXPR, type, arg00, hi);
6467 return build_range_check (type, arg00, 0, lo, hi);
6469 case LT_EXPR:
6470 if (TREE_OVERFLOW (lo))
6472 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6473 return omit_one_operand (type, tmp, arg00);
6475 return fold_build2 (LT_EXPR, type, arg00, lo);
6477 case LE_EXPR:
6478 if (TREE_OVERFLOW (hi))
6480 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6481 return omit_one_operand (type, tmp, arg00);
6483 return fold_build2 (LE_EXPR, type, arg00, hi);
6485 case GT_EXPR:
6486 if (TREE_OVERFLOW (hi))
6488 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6489 return omit_one_operand (type, tmp, arg00);
6491 return fold_build2 (GT_EXPR, type, arg00, hi);
6493 case GE_EXPR:
6494 if (TREE_OVERFLOW (lo))
6496 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6497 return omit_one_operand (type, tmp, arg00);
6499 return fold_build2 (GE_EXPR, type, arg00, lo);
6501 default:
6502 break;
6505 return NULL_TREE;
6509 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6510 equality/inequality test, then return a simplified form of the test
6511 using a sign test. Otherwise return NULL. TYPE is the desired
6512 result type. */
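/* E.g. for a 32-bit int x, (x & 0x80000000) != 0 tests exactly the
   sign bit and becomes x < 0, while (x & 0x80000000) == 0 becomes
   x >= 0.  */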
6514 static tree
6515 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6516 tree result_type)
6518 /* If this is testing a single bit, we can optimize the test. */
6519 if ((code == NE_EXPR || code == EQ_EXPR)
6520 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6521 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6523 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6524 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6525 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6527 if (arg00 != NULL_TREE
6528 /* This is only a win if casting to a signed type is cheap,
6529 i.e. when arg00's type is not a partial mode. */
6530 && TYPE_PRECISION (TREE_TYPE (arg00))
6531 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6533 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6534 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6535 result_type, fold_convert (stype, arg00),
6536 build_int_cst (stype, 0));
6540 return NULL_TREE;
6543 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6544 equality/inequality test, then return a simplified form of
6545 the test using shifts and logical operations. Otherwise return
6546 NULL. TYPE is the desired result type. */
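/* E.g. (x & 4) != 0 becomes (x >> 2) & 1, and (x & 4) == 0 becomes
   ((x >> 2) ^ 1) & 1, modulo the sign and type adjustments made in
   the body below.  */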
6548 tree
6549 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6550 tree result_type)
6552 /* If this is testing a single bit, we can optimize the test. */
6553 if ((code == NE_EXPR || code == EQ_EXPR)
6554 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6555 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6557 tree inner = TREE_OPERAND (arg0, 0);
6558 tree type = TREE_TYPE (arg0);
6559 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6560 enum machine_mode operand_mode = TYPE_MODE (type);
6561 int ops_unsigned;
6562 tree signed_type, unsigned_type, intermediate_type;
6563 tree tem, one;
6565 /* First, see if we can fold the single bit test into a sign-bit
6566 test. */
6567 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6568 result_type);
6569 if (tem)
6570 return tem;
6572 /* Otherwise we have (A & C) != 0 where C is a single bit,
6573 convert that into ((A >> C2) & 1), where C2 = log2(C).
6574 Similarly for (A & C) == 0. */
6576 /* If INNER is a right shift by a constant and it plus BITNUM does
6577 not overflow, adjust BITNUM and INNER. */
6578 if (TREE_CODE (inner) == RSHIFT_EXPR
6579 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6580 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6581 && bitnum < TYPE_PRECISION (type)
6582 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6583 bitnum - TYPE_PRECISION (type)))
6585 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6586 inner = TREE_OPERAND (inner, 0);
6589 /* If we are going to be able to omit the AND below, we must do our
6590 operations as unsigned. If we must use the AND, we have a choice.
6591 Normally unsigned is faster, but for some machines signed is. */
6592 #ifdef LOAD_EXTEND_OP
6593 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6594 && !flag_syntax_only) ? 0 : 1;
6595 #else
6596 ops_unsigned = 1;
6597 #endif
6599 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6600 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6601 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6602 inner = fold_convert (intermediate_type, inner);
6604 if (bitnum != 0)
6605 inner = build2 (RSHIFT_EXPR, intermediate_type,
6606 inner, size_int (bitnum));
6608 one = build_int_cst (intermediate_type, 1);
6610 if (code == EQ_EXPR)
6611 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6613 /* Put the AND last so it can combine with more things. */
6614 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6616 /* Make sure to return the proper type. */
6617 inner = fold_convert (result_type, inner);
6619 return inner;
6621 return NULL_TREE;
6624 /* Check whether we are allowed to reorder operands arg0 and arg1,
6625 such that the evaluation of arg1 occurs before arg0. */
6627 static bool
6628 reorder_operands_p (tree arg0, tree arg1)
6630 if (! flag_evaluation_order)
6631 return true;
6632 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6633 return true;
6634 return ! TREE_SIDE_EFFECTS (arg0)
6635 && ! TREE_SIDE_EFFECTS (arg1);
6638 /* Test whether it is preferable to swap two operands, ARG0 and
6639 ARG1, for example because ARG0 is an integer constant and ARG1
6640 isn't. If REORDER is true, only recommend swapping if we can
6641 evaluate the operands in reverse order. */
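/* For example, given 5 < x this returns true, and the caller rewrites
   the comparison as x > 5 so that constants consistently appear as the
   second operand.  */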
6643 bool
6644 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6646 STRIP_SIGN_NOPS (arg0);
6647 STRIP_SIGN_NOPS (arg1);
6649 if (TREE_CODE (arg1) == INTEGER_CST)
6650 return 0;
6651 if (TREE_CODE (arg0) == INTEGER_CST)
6652 return 1;
6654 if (TREE_CODE (arg1) == REAL_CST)
6655 return 0;
6656 if (TREE_CODE (arg0) == REAL_CST)
6657 return 1;
6659 if (TREE_CODE (arg1) == COMPLEX_CST)
6660 return 0;
6661 if (TREE_CODE (arg0) == COMPLEX_CST)
6662 return 1;
6664 if (TREE_CONSTANT (arg1))
6665 return 0;
6666 if (TREE_CONSTANT (arg0))
6667 return 1;
6669 if (optimize_size)
6670 return 0;
6672 if (reorder && flag_evaluation_order
6673 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6674 return 0;
6676 if (DECL_P (arg1))
6677 return 0;
6678 if (DECL_P (arg0))
6679 return 1;
6681 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6682 for commutative and comparison operators. Ensuring a canonical
6683 form allows the optimizers to find additional redundancies without
6684 having to explicitly check for both orderings. */
6685 if (TREE_CODE (arg0) == SSA_NAME
6686 && TREE_CODE (arg1) == SSA_NAME
6687 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6688 return 1;
6690 return 0;
6693 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6694 ARG0 is extended to a wider type. */
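/* For example, with unsigned char c, the test (int) c == 300 can never
   be true since c is at most 255, so it folds to constant false
   (preserving any side effects of evaluating c).  */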
6696 static tree
6697 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6699 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6700 tree arg1_unw;
6701 tree shorter_type, outer_type;
6702 tree min, max;
6703 bool above, below;
6705 if (arg0_unw == arg0)
6706 return NULL_TREE;
6707 shorter_type = TREE_TYPE (arg0_unw);
6709 #ifdef HAVE_canonicalize_funcptr_for_compare
6710 /* Disable this optimization if we're casting a function pointer
6711 type on targets that require function pointer canonicalization. */
6712 if (HAVE_canonicalize_funcptr_for_compare
6713 && TREE_CODE (shorter_type) == POINTER_TYPE
6714 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6715 return NULL_TREE;
6716 #endif
6718 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6719 return NULL_TREE;
6721 arg1_unw = get_unwidened (arg1, shorter_type);
6723 /* If possible, express the comparison in the shorter mode. */
6724 if ((code == EQ_EXPR || code == NE_EXPR
6725 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6726 && (TREE_TYPE (arg1_unw) == shorter_type
6727 || (TREE_CODE (arg1_unw) == INTEGER_CST
6728 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6729 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6730 && int_fits_type_p (arg1_unw, shorter_type))))
6731 return fold_build2 (code, type, arg0_unw,
6732 fold_convert (shorter_type, arg1_unw));
6734 if (TREE_CODE (arg1_unw) != INTEGER_CST
6735 || TREE_CODE (shorter_type) != INTEGER_TYPE
6736 || !int_fits_type_p (arg1_unw, shorter_type))
6737 return NULL_TREE;
6739 /* If we are comparing with an integer that does not fit into the range
6740 of the shorter type, the result is known. */
6741 outer_type = TREE_TYPE (arg1_unw);
6742 min = lower_bound_in_type (outer_type, shorter_type);
6743 max = upper_bound_in_type (outer_type, shorter_type);
6745 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6746 max, arg1_unw));
6747 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6748 arg1_unw, min));
6750 switch (code)
6752 case EQ_EXPR:
6753 if (above || below)
6754 return omit_one_operand (type, integer_zero_node, arg0);
6755 break;
6757 case NE_EXPR:
6758 if (above || below)
6759 return omit_one_operand (type, integer_one_node, arg0);
6760 break;
6762 case LT_EXPR:
6763 case LE_EXPR:
6764 if (above)
6765 return omit_one_operand (type, integer_one_node, arg0);
6766 else if (below)
6767 return omit_one_operand (type, integer_zero_node, arg0);
6769 case GT_EXPR:
6770 case GE_EXPR:
6771 if (above)
6772 return omit_one_operand (type, integer_zero_node, arg0);
6773 else if (below)
6774 return omit_one_operand (type, integer_one_node, arg0);
6776 default:
6777 break;
6780 return NULL_TREE;
6783 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6784 ARG0 just the signedness is changed. */
6786 static tree
6787 fold_sign_changed_comparison (enum tree_code code, tree type,
6788 tree arg0, tree arg1)
6790 tree arg0_inner;
6791 tree inner_type, outer_type;
6793 if (TREE_CODE (arg0) != NOP_EXPR
6794 && TREE_CODE (arg0) != CONVERT_EXPR)
6795 return NULL_TREE;
6797 outer_type = TREE_TYPE (arg0);
6798 arg0_inner = TREE_OPERAND (arg0, 0);
6799 inner_type = TREE_TYPE (arg0_inner);
6801 #ifdef HAVE_canonicalize_funcptr_for_compare
6802 /* Disable this optimization if we're casting a function pointer
6803 type on targets that require function pointer canonicalization. */
6804 if (HAVE_canonicalize_funcptr_for_compare
6805 && TREE_CODE (inner_type) == POINTER_TYPE
6806 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6807 return NULL_TREE;
6808 #endif
6810 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6811 return NULL_TREE;
6813 if (TREE_CODE (arg1) != INTEGER_CST
6814 && !((TREE_CODE (arg1) == NOP_EXPR
6815 || TREE_CODE (arg1) == CONVERT_EXPR)
6816 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6817 return NULL_TREE;
6819 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6820 && code != NE_EXPR
6821 && code != EQ_EXPR)
6822 return NULL_TREE;
6824 if (TREE_CODE (arg1) == INTEGER_CST)
6825 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6826 TREE_INT_CST_HIGH (arg1), 0,
6827 TREE_OVERFLOW (arg1));
6828 else
6829 arg1 = fold_convert (inner_type, arg1);
6831 return fold_build2 (code, type, arg0_inner, arg1);
6834 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s
6835 is the step of the array. Reconstructs s and delta in the case of
6836 s * delta being an integer constant (and thus already folded).
6837 ADDR is the address. OP1 is the multiplicative expression.
6838 If the function succeeds, the new address expression is returned. Otherwise
6839 NULL_TREE is returned. */
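/* A hypothetical example: for an array of 4-byte ints, the address
   computation &a[i] + j * 4 is rewritten as &a[i + j], provided the
   multiplier matches the element size (the array step).  */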
6841 static tree
6842 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6844 tree s, delta, step;
6845 tree ref = TREE_OPERAND (addr, 0), pref;
6846 tree ret, pos;
6847 tree itype;
6848 bool mdim = false;
6850 /* Canonicalize op1 into a possibly non-constant delta
6851 and an INTEGER_CST s. */
6852 if (TREE_CODE (op1) == MULT_EXPR)
6854 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6856 STRIP_NOPS (arg0);
6857 STRIP_NOPS (arg1);
6859 if (TREE_CODE (arg0) == INTEGER_CST)
6861 s = arg0;
6862 delta = arg1;
6864 else if (TREE_CODE (arg1) == INTEGER_CST)
6866 s = arg1;
6867 delta = arg0;
6869 else
6870 return NULL_TREE;
6872 else if (TREE_CODE (op1) == INTEGER_CST)
6874 delta = op1;
6875 s = NULL_TREE;
6877 else
6879 /* Pretend that op1 is delta * 1. */
6880 delta = op1;
6881 s = integer_one_node;
6884 for (;; ref = TREE_OPERAND (ref, 0))
6886 if (TREE_CODE (ref) == ARRAY_REF)
6888 /* Remember if this was a multi-dimensional array. */
6889 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6890 mdim = true;
6892 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6893 if (! itype)
6894 continue;
6896 step = array_ref_element_size (ref);
6897 if (TREE_CODE (step) != INTEGER_CST)
6898 continue;
6900 if (s)
6902 if (! tree_int_cst_equal (step, s))
6903 continue;
6905 else
6907 /* See whether delta is a multiple of step. */
6908 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6909 if (! tmp)
6910 continue;
6911 delta = tmp;
6914 /* Only fold here if we can verify we do not overflow one
6915 dimension of a multi-dimensional array. */
6916 if (mdim)
6918 tree tmp;
6920 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6921 || !INTEGRAL_TYPE_P (itype)
6922 || !TYPE_MAX_VALUE (itype)
6923 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6924 continue;
6926 tmp = fold_binary (code, itype,
6927 fold_convert (itype,
6928 TREE_OPERAND (ref, 1)),
6929 fold_convert (itype, delta));
6930 if (!tmp
6931 || TREE_CODE (tmp) != INTEGER_CST
6932 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6933 continue;
6936 break;
6938 else
6939 mdim = false;
6941 if (!handled_component_p (ref))
6942 return NULL_TREE;
6945 /* We found a suitable array reference. So copy everything up to it,
6946 and replace the index. */
6948 pref = TREE_OPERAND (addr, 0);
6949 ret = copy_node (pref);
6950 pos = ret;
6952 while (pref != ref)
6954 pref = TREE_OPERAND (pref, 0);
6955 TREE_OPERAND (pos, 0) = copy_node (pref);
6956 pos = TREE_OPERAND (pos, 0);
6959 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6960 fold_convert (itype,
6961 TREE_OPERAND (pos, 1)),
6962 fold_convert (itype, delta));
6964 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6968 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6969 means A >= Y && A != MAX, but in this case we know that
6970 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6972 static tree
6973 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6975 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6977 if (TREE_CODE (bound) == LT_EXPR)
6978 a = TREE_OPERAND (bound, 0);
6979 else if (TREE_CODE (bound) == GT_EXPR)
6980 a = TREE_OPERAND (bound, 1);
6981 else
6982 return NULL_TREE;
6984 typea = TREE_TYPE (a);
6985 if (!INTEGRAL_TYPE_P (typea)
6986 && !POINTER_TYPE_P (typea))
6987 return NULL_TREE;
6989 if (TREE_CODE (ineq) == LT_EXPR)
6991 a1 = TREE_OPERAND (ineq, 1);
6992 y = TREE_OPERAND (ineq, 0);
6994 else if (TREE_CODE (ineq) == GT_EXPR)
6996 a1 = TREE_OPERAND (ineq, 0);
6997 y = TREE_OPERAND (ineq, 1);
6999 else
7000 return NULL_TREE;
7002 if (TREE_TYPE (a1) != typea)
7003 return NULL_TREE;
7005 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
7006 if (!integer_onep (diff))
7007 return NULL_TREE;
7009 return fold_build2 (GE_EXPR, type, a, y);
7012 /* Fold a sum or difference involving at least one multiplication.
7013 Returns the folded tree or NULL if no simplification could be made. */
7015 static tree
7016 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7018 tree arg00, arg01, arg10, arg11;
7019 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7021 /* (A * C) +- (B * C) -> (A+-B) * C.
7022 (A * C) +- A -> A * (C+-1).
7023 We are most concerned about the case where C is a constant,
7024 but other combinations show up during loop reduction. Since
7025 it is not difficult, try all four possibilities. */
7027 if (TREE_CODE (arg0) == MULT_EXPR)
7029 arg00 = TREE_OPERAND (arg0, 0);
7030 arg01 = TREE_OPERAND (arg0, 1);
7032 else
7034 arg00 = arg0;
7035 arg01 = build_one_cst (type);
7037 if (TREE_CODE (arg1) == MULT_EXPR)
7039 arg10 = TREE_OPERAND (arg1, 0);
7040 arg11 = TREE_OPERAND (arg1, 1);
7042 else
7044 arg10 = arg1;
7045 arg11 = build_one_cst (type);
7047 same = NULL_TREE;
7049 if (operand_equal_p (arg01, arg11, 0))
7050 same = arg01, alt0 = arg00, alt1 = arg10;
7051 else if (operand_equal_p (arg00, arg10, 0))
7052 same = arg00, alt0 = arg01, alt1 = arg11;
7053 else if (operand_equal_p (arg00, arg11, 0))
7054 same = arg00, alt0 = arg01, alt1 = arg10;
7055 else if (operand_equal_p (arg01, arg10, 0))
7056 same = arg01, alt0 = arg00, alt1 = arg11;
7058 /* No identical multiplicands; see if we can find a common
7059 power-of-two factor in non-power-of-two multiplies. This
7060 can help in multi-dimensional array access. */
7061 else if (host_integerp (arg01, 0)
7062 && host_integerp (arg11, 0))
7064 HOST_WIDE_INT int01, int11, tmp;
7065 bool swap = false;
7066 tree maybe_same;
7067 int01 = TREE_INT_CST_LOW (arg01);
7068 int11 = TREE_INT_CST_LOW (arg11);
7070 /* Move min of absolute values to int11. */
7071 if ((int01 >= 0 ? int01 : -int01)
7072 < (int11 >= 0 ? int11 : -int11))
7074 tmp = int01, int01 = int11, int11 = tmp;
7075 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7076 maybe_same = arg01;
7077 swap = true;
7079 else
7080 maybe_same = arg11;
7082 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7084 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7085 build_int_cst (TREE_TYPE (arg00),
7086 int01 / int11));
7087 alt1 = arg10;
7088 same = maybe_same;
7089 if (swap)
7090 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7094 if (same)
7095 return fold_build2 (MULT_EXPR, type,
7096 fold_build2 (code, type,
7097 fold_convert (type, alt0),
7098 fold_convert (type, alt1)),
7099 fold_convert (type, same));
7101 return NULL_TREE;
7104 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7105 specified by EXPR into the buffer PTR of length LEN bytes.
7106 Return the number of bytes placed in the buffer, or zero
7107 upon failure. */
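/* For example, encoding the 32-bit INTEGER_CST 0x01020304 on a
   little-endian target stores the bytes 04 03 02 01 into PTR and
   returns 4.  */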
7109 static int
7110 native_encode_int (tree expr, unsigned char *ptr, int len)
7112 tree type = TREE_TYPE (expr);
7113 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7114 int byte, offset, word, words;
7115 unsigned char value;
7117 if (total_bytes > len)
7118 return 0;
7119 words = total_bytes / UNITS_PER_WORD;
7121 for (byte = 0; byte < total_bytes; byte++)
7123 int bitpos = byte * BITS_PER_UNIT;
7124 if (bitpos < HOST_BITS_PER_WIDE_INT)
7125 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7126 else
7127 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7128 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7130 if (total_bytes > UNITS_PER_WORD)
7132 word = byte / UNITS_PER_WORD;
7133 if (WORDS_BIG_ENDIAN)
7134 word = (words - 1) - word;
7135 offset = word * UNITS_PER_WORD;
7136 if (BYTES_BIG_ENDIAN)
7137 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7138 else
7139 offset += byte % UNITS_PER_WORD;
7141 else
7142 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7143 ptr[offset] = value;
7145 return total_bytes;
7149 /* Subroutine of native_encode_expr. Encode the REAL_CST
7150 specified by EXPR into the buffer PTR of length LEN bytes.
7151 Return the number of bytes placed in the buffer, or zero
7152 upon failure. */
7154 static int
7155 native_encode_real (tree expr, unsigned char *ptr, int len)
7157 tree type = TREE_TYPE (expr);
7158 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7159 int byte, offset, word, words;
7160 unsigned char value;
7162 /* There are always 32 bits in each long, no matter the size of
7163 the host's long. We handle floating point representations with
7164 up to 192 bits. */
7165 long tmp[6];
7167 if (total_bytes > len)
7168 return 0;
7169 words = total_bytes / UNITS_PER_WORD;
7171 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7173 for (byte = 0; byte < total_bytes; byte++)
7175 int bitpos = byte * BITS_PER_UNIT;
7176 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7178 if (total_bytes > UNITS_PER_WORD)
7180 word = byte / UNITS_PER_WORD;
7181 if (FLOAT_WORDS_BIG_ENDIAN)
7182 word = (words - 1) - word;
7183 offset = word * UNITS_PER_WORD;
7184 if (BYTES_BIG_ENDIAN)
7185 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7186 else
7187 offset += byte % UNITS_PER_WORD;
7189 else
7190 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7191 ptr[offset] = value;
7193 return total_bytes;
7196 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7197 specified by EXPR into the buffer PTR of length LEN bytes.
7198 Return the number of bytes placed in the buffer, or zero
7199 upon failure. */
7201 static int
7202 native_encode_complex (tree expr, unsigned char *ptr, int len)
7204 int rsize, isize;
7205 tree part;
7207 part = TREE_REALPART (expr);
7208 rsize = native_encode_expr (part, ptr, len);
7209 if (rsize == 0)
7210 return 0;
7211 part = TREE_IMAGPART (expr);
7212 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7213 if (isize != rsize)
7214 return 0;
7215 return rsize + isize;
7219 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7220 specified by EXPR into the buffer PTR of length LEN bytes.
7221 Return the number of bytes placed in the buffer, or zero
7222 upon failure. */
7224 static int
7225 native_encode_vector (tree expr, unsigned char *ptr, int len)
7227 int i, size, offset, count;
7228 tree itype, elem, elements;
7230 offset = 0;
7231 elements = TREE_VECTOR_CST_ELTS (expr);
7232 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7233 itype = TREE_TYPE (TREE_TYPE (expr));
7234 size = GET_MODE_SIZE (TYPE_MODE (itype));
7235 for (i = 0; i < count; i++)
7237 if (elements)
7239 elem = TREE_VALUE (elements);
7240 elements = TREE_CHAIN (elements);
7242 else
7243 elem = NULL_TREE;
7245 if (elem)
7247 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7248 return 0;
7250 else
7252 if (offset + size > len)
7253 return 0;
7254 memset (ptr+offset, 0, size);
7256 offset += size;
7258 return offset;
7262 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7263 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7264 buffer PTR of length LEN bytes. Return the number of bytes
7265 placed in the buffer, or zero upon failure. */
7267 static int
7268 native_encode_expr (tree expr, unsigned char *ptr, int len)
7270 switch (TREE_CODE (expr))
7272 case INTEGER_CST:
7273 return native_encode_int (expr, ptr, len);
7275 case REAL_CST:
7276 return native_encode_real (expr, ptr, len);
7278 case COMPLEX_CST:
7279 return native_encode_complex (expr, ptr, len);
7281 case VECTOR_CST:
7282 return native_encode_vector (expr, ptr, len);
7284 default:
7285 return 0;
7290 /* Subroutine of native_interpret_expr. Interpret the contents of
7291 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7292 If the buffer cannot be interpreted, return NULL_TREE. */
7294 static tree
7295 native_interpret_int (tree type, unsigned char *ptr, int len)
7297 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298 int byte, offset, word, words;
7299 unsigned char value;
7300 unsigned HOST_WIDE_INT lo = 0;
7301 HOST_WIDE_INT hi = 0;
7303 if (total_bytes > len)
7304 return NULL_TREE;
7305 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7306 return NULL_TREE;
7307 words = total_bytes / UNITS_PER_WORD;
7309 for (byte = 0; byte < total_bytes; byte++)
7311 int bitpos = byte * BITS_PER_UNIT;
7312 if (total_bytes > UNITS_PER_WORD)
7314 word = byte / UNITS_PER_WORD;
7315 if (WORDS_BIG_ENDIAN)
7316 word = (words - 1) - word;
7317 offset = word * UNITS_PER_WORD;
7318 if (BYTES_BIG_ENDIAN)
7319 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7320 else
7321 offset += byte % UNITS_PER_WORD;
7323 else
7324 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7325 value = ptr[offset];
7327 if (bitpos < HOST_BITS_PER_WIDE_INT)
7328 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7329 else
7330 hi |= (unsigned HOST_WIDE_INT) value
7331 << (bitpos - HOST_BITS_PER_WIDE_INT);
7334 return build_int_cst_wide_type (type, lo, hi);
7338 /* Subroutine of native_interpret_expr. Interpret the contents of
7339 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7340 If the buffer cannot be interpreted, return NULL_TREE. */
7342 static tree
7343 native_interpret_real (tree type, unsigned char *ptr, int len)
7345 enum machine_mode mode = TYPE_MODE (type);
7346 int total_bytes = GET_MODE_SIZE (mode);
7347 int byte, offset, word, words;
7348 unsigned char value;
7349 /* There are always 32 bits in each long, no matter the size of
7350 the host's long. We handle floating point representations with
7351 up to 192 bits. */
7352 REAL_VALUE_TYPE r;
7353 long tmp[6];
7355 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7356 if (total_bytes > len || total_bytes > 24)
7357 return NULL_TREE;
7358 words = total_bytes / UNITS_PER_WORD;
7360 memset (tmp, 0, sizeof (tmp));
7361 for (byte = 0; byte < total_bytes; byte++)
7363 int bitpos = byte * BITS_PER_UNIT;
7364 if (total_bytes > UNITS_PER_WORD)
7366 word = byte / UNITS_PER_WORD;
7367 if (FLOAT_WORDS_BIG_ENDIAN)
7368 word = (words - 1) - word;
7369 offset = word * UNITS_PER_WORD;
7370 if (BYTES_BIG_ENDIAN)
7371 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7372 else
7373 offset += byte % UNITS_PER_WORD;
7375 else
7376 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7377 value = ptr[offset];
7379 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7382 real_from_target (&r, tmp, mode);
7383 return build_real (type, r);
7387 /* Subroutine of native_interpret_expr. Interpret the contents of
7388 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7389 If the buffer cannot be interpreted, return NULL_TREE. */
7391 static tree
7392 native_interpret_complex (tree type, unsigned char *ptr, int len)
7394 tree etype, rpart, ipart;
7395 int size;
7397 etype = TREE_TYPE (type);
7398 size = GET_MODE_SIZE (TYPE_MODE (etype));
7399 if (size * 2 > len)
7400 return NULL_TREE;
7401 rpart = native_interpret_expr (etype, ptr, size);
7402 if (!rpart)
7403 return NULL_TREE;
7404 ipart = native_interpret_expr (etype, ptr+size, size);
7405 if (!ipart)
7406 return NULL_TREE;
7407 return build_complex (type, rpart, ipart);
7411 /* Subroutine of native_interpret_expr. Interpret the contents of
7412 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7413 If the buffer cannot be interpreted, return NULL_TREE. */
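/* For example, a V4SI vector is four 4-byte elements, so LEN must be at
least 16. The loop below walks the elements backwards so that the
TREE_LIST handed to build_vector starts with element 0. */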
7415 static tree
7416 native_interpret_vector (tree type, unsigned char *ptr, int len)
7418 tree etype, elem, elements;
7419 int i, size, count;
7421 etype = TREE_TYPE (type);
7422 size = GET_MODE_SIZE (TYPE_MODE (etype));
7423 count = TYPE_VECTOR_SUBPARTS (type);
7424 if (size * count > len)
7425 return NULL_TREE;
7427 elements = NULL_TREE;
7428 for (i = count - 1; i >= 0; i--)
7430 elem = native_interpret_expr (etype, ptr+(i*size), size);
7431 if (!elem)
7432 return NULL_TREE;
7433 elements = tree_cons (NULL_TREE, elem, elements);
7435 return build_vector (type, elements);
7439 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7440 the buffer PTR of length LEN as a constant of type TYPE. For
7441 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7442 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7443 return NULL_TREE. */
7445 static tree
7446 native_interpret_expr (tree type, unsigned char *ptr, int len)
7448 switch (TREE_CODE (type))
7450 case INTEGER_TYPE:
7451 case ENUMERAL_TYPE:
7452 case BOOLEAN_TYPE:
7453 return native_interpret_int (type, ptr, len);
7455 case REAL_TYPE:
7456 return native_interpret_real (type, ptr, len);
7458 case COMPLEX_TYPE:
7459 return native_interpret_complex (type, ptr, len);
7461 case VECTOR_TYPE:
7462 return native_interpret_vector (type, ptr, len);
7464 default:
7465 return NULL_TREE;
7470 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7471 TYPE at compile-time. If we're unable to perform the conversion
7472 return NULL_TREE. */
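/* A sketch of the round trip (assuming IEEE single precision and 32-bit
int): VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST into the buffer
and reinterprets those same four bytes as the INTEGER_CST 0x3f800000. */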
7474 static tree
7475 fold_view_convert_expr (tree type, tree expr)
7477 /* We support up to 512-bit values (for V8DFmode). */
7478 unsigned char buffer[64];
7479 int len;
7481 /* Check that the host and target are sane. */
7482 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7483 return NULL_TREE;
7485 len = native_encode_expr (expr, buffer, sizeof (buffer));
7486 if (len == 0)
7487 return NULL_TREE;
7489 return native_interpret_expr (type, buffer, len);
7493 /* Fold a unary expression of code CODE and type TYPE with operand
7494 OP0. Return the folded expression if folding is successful.
7495 Otherwise, return NULL_TREE. */
7497 tree
7498 fold_unary (enum tree_code code, tree type, tree op0)
7500 tree tem;
7501 tree arg0;
7502 enum tree_code_class kind = TREE_CODE_CLASS (code);
7504 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7505 && TREE_CODE_LENGTH (code) == 1);
7507 arg0 = op0;
7508 if (arg0)
7510 if (code == NOP_EXPR || code == CONVERT_EXPR
7511 || code == FLOAT_EXPR || code == ABS_EXPR)
7513 /* Don't use STRIP_NOPS, because signedness of argument type
7514 matters. */
7515 STRIP_SIGN_NOPS (arg0);
7517 else
7519 /* Strip any conversions that don't change the mode. This
7520 is safe for every expression, except for a comparison
7521 expression because its signedness is derived from its
7522 operands.
7524 Note that this is done as an internal manipulation within
7525 the constant folder, in order to find the simplest
7526 representation of the arguments so that their form can be
7527 studied. In any case, the appropriate type conversions
7528 should be put back in the tree that will get out of the
7529 constant folder. */
7530 STRIP_NOPS (arg0);
7534 if (TREE_CODE_CLASS (code) == tcc_unary)
7536 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7537 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7538 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7539 else if (TREE_CODE (arg0) == COND_EXPR)
7541 tree arg01 = TREE_OPERAND (arg0, 1);
7542 tree arg02 = TREE_OPERAND (arg0, 2);
7543 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7544 arg01 = fold_build1 (code, type, arg01);
7545 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7546 arg02 = fold_build1 (code, type, arg02);
7547 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7548 arg01, arg02);
7550 /* If this was a conversion, and all we did was to move it
7551 inside the COND_EXPR, bring it back out. But leave it if
7552 it is a conversion from integer to integer and the
7553 result precision is no wider than a word since such a
7554 conversion is cheap and may be optimized away by combine,
7555 while it couldn't if it were outside the COND_EXPR. Then return
7556 so we don't get into an infinite recursion loop taking the
7557 conversion out and then back in. */
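/* For instance (assuming 32-bit int and a 32-bit word), (short) (p ? i : j)
stays as p ? (short) i : (short) j -- a cheap int-to-int conversion no
wider than a word -- while a conversion to a non-integral or
wider-than-word type is hoisted back out of the COND_EXPR. */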
7559 if ((code == NOP_EXPR || code == CONVERT_EXPR
7560 || code == NON_LVALUE_EXPR)
7561 && TREE_CODE (tem) == COND_EXPR
7562 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7563 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7564 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7565 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7566 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7567 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7568 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7569 && (INTEGRAL_TYPE_P
7570 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7571 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7572 || flag_syntax_only))
7573 tem = build1 (code, type,
7574 build3 (COND_EXPR,
7575 TREE_TYPE (TREE_OPERAND
7576 (TREE_OPERAND (tem, 1), 0)),
7577 TREE_OPERAND (tem, 0),
7578 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7579 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7580 return tem;
7582 else if (COMPARISON_CLASS_P (arg0))
7584 if (TREE_CODE (type) == BOOLEAN_TYPE)
7586 arg0 = copy_node (arg0);
7587 TREE_TYPE (arg0) = type;
7588 return arg0;
7590 else if (TREE_CODE (type) != INTEGER_TYPE)
7591 return fold_build3 (COND_EXPR, type, arg0,
7592 fold_build1 (code, type,
7593 integer_one_node),
7594 fold_build1 (code, type,
7595 integer_zero_node));
7599 switch (code)
7601 case NOP_EXPR:
7602 case FLOAT_EXPR:
7603 case CONVERT_EXPR:
7604 case FIX_TRUNC_EXPR:
7605 if (TREE_TYPE (op0) == type)
7606 return op0;
7608 /* If we have (type) (a CMP b) and type is an integral type, return
7609 new expression involving the new type. */
7610 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7611 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7612 TREE_OPERAND (op0, 1));
7614 /* Handle cases of two conversions in a row. */
7615 if (TREE_CODE (op0) == NOP_EXPR
7616 || TREE_CODE (op0) == CONVERT_EXPR)
7618 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7619 tree inter_type = TREE_TYPE (op0);
7620 int inside_int = INTEGRAL_TYPE_P (inside_type);
7621 int inside_ptr = POINTER_TYPE_P (inside_type);
7622 int inside_float = FLOAT_TYPE_P (inside_type);
7623 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7624 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7625 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7626 int inter_int = INTEGRAL_TYPE_P (inter_type);
7627 int inter_ptr = POINTER_TYPE_P (inter_type);
7628 int inter_float = FLOAT_TYPE_P (inter_type);
7629 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7630 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7631 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7632 int final_int = INTEGRAL_TYPE_P (type);
7633 int final_ptr = POINTER_TYPE_P (type);
7634 int final_float = FLOAT_TYPE_P (type);
7635 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7636 unsigned int final_prec = TYPE_PRECISION (type);
7637 int final_unsignedp = TYPE_UNSIGNED (type);
7639 /* In addition to the cases of two conversions in a row
7640 handled below, if we are converting something to its own
7641 type via an object of identical or wider precision, neither
7642 conversion is needed. */
7643 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7644 && (((inter_int || inter_ptr) && final_int)
7645 || (inter_float && final_float))
7646 && inter_prec >= final_prec)
7647 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7649 /* Likewise, if the intermediate and final types are either both
7650 float or both integer, we don't need the middle conversion if
7651 it is wider than the final type and doesn't change the signedness
7652 (for integers). Avoid this if the final type is a pointer
7653 since then we sometimes need the inner conversion. Likewise if
7654 the outer has a precision not equal to the size of its mode. */
7655 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7656 || (inter_float && inside_float)
7657 || (inter_vec && inside_vec))
7658 && inter_prec >= inside_prec
7659 && (inter_float || inter_vec
7660 || inter_unsignedp == inside_unsignedp)
7661 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7662 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7663 && ! final_ptr
7664 && (! final_vec || inter_prec == inside_prec))
7665 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7667 /* If we have a sign-extension of a zero-extended value, we can
7668 replace that by a single zero-extension. */
7669 if (inside_int && inter_int && final_int
7670 && inside_prec < inter_prec && inter_prec < final_prec
7671 && inside_unsignedp && !inter_unsignedp)
7672 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7674 /* Two conversions in a row are not needed unless:
7675 - some conversion is floating-point (overstrict for now), or
7676 - some conversion is a vector (overstrict for now), or
7677 - the intermediate type is narrower than both initial and
7678 final, or
7679 - the intermediate type and innermost type differ in signedness,
7680 and the outermost type is wider than the intermediate, or
7681 - the initial type is a pointer type and the precisions of the
7682 intermediate and final types differ, or
7683 - the final type is a pointer type and the precisions of the
7684 initial and intermediate types differ.
7685 - the final type is a pointer type and the initial type is not, or
7686 - the initial type is a pointer to an array and the final type
7687 is not. */
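/* E.g. with 32-bit int and 64-bit long, none of the exceptions above
applies to (int) (long) X for an int X, so the pair of conversions
collapses to (int) X, i.e. X itself. */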
7688 if (! inside_float && ! inter_float && ! final_float
7689 && ! inside_vec && ! inter_vec && ! final_vec
7690 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7691 && ! (inside_int && inter_int
7692 && inter_unsignedp != inside_unsignedp
7693 && inter_prec < final_prec)
7694 && ((inter_unsignedp && inter_prec > inside_prec)
7695 == (final_unsignedp && final_prec > inter_prec))
7696 && ! (inside_ptr && inter_prec != final_prec)
7697 && ! (final_ptr && inside_prec != inter_prec)
7698 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7699 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7700 && final_ptr == inside_ptr
7701 && ! (inside_ptr
7702 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7703 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7704 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7707 /* Handle (T *)&A.B.C for A being of type T and B and C
7708 living at offset zero. This occurs frequently in
7709 C++ upcasting and then accessing the base. */
7710 if (TREE_CODE (op0) == ADDR_EXPR
7711 && POINTER_TYPE_P (type)
7712 && handled_component_p (TREE_OPERAND (op0, 0)))
7714 HOST_WIDE_INT bitsize, bitpos;
7715 tree offset;
7716 enum machine_mode mode;
7717 int unsignedp, volatilep;
7718 tree base = TREE_OPERAND (op0, 0);
7719 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7720 &mode, &unsignedp, &volatilep, false);
7721 /* If the reference was to a (constant) zero offset, we can use
7722 the address of the base if it has the same base type
7723 as the result type. */
7724 if (! offset && bitpos == 0
7725 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7726 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7727 return fold_convert (type, build_fold_addr_expr (base));
7730 if ((TREE_CODE (op0) == MODIFY_EXPR
7731 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7732 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7733 /* Detect assigning a bitfield. */
7734 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7735 && DECL_BIT_FIELD
7736 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7738 /* Don't leave an assignment inside a conversion
7739 unless assigning a bitfield. */
7740 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7741 /* First do the assignment, then return converted constant. */
7742 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7743 TREE_NO_WARNING (tem) = 1;
7744 TREE_USED (tem) = 1;
7745 return tem;
7748 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7749 constant (if x has signed type, the sign bit cannot be set
7750 in c). This folds extension into the BIT_AND_EXPR. */
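/* E.g. (unsigned char) (x & 0x7f) for an int X becomes
(unsigned char) x & 0x7f; the mask already fits the narrower type, so
masking before or after the conversion is equivalent. */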
7751 if (INTEGRAL_TYPE_P (type)
7752 && TREE_CODE (type) != BOOLEAN_TYPE
7753 && TREE_CODE (op0) == BIT_AND_EXPR
7754 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7756 tree and = op0;
7757 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7758 int change = 0;
7760 if (TYPE_UNSIGNED (TREE_TYPE (and))
7761 || (TYPE_PRECISION (type)
7762 <= TYPE_PRECISION (TREE_TYPE (and))))
7763 change = 1;
7764 else if (TYPE_PRECISION (TREE_TYPE (and1))
7765 <= HOST_BITS_PER_WIDE_INT
7766 && host_integerp (and1, 1))
7768 unsigned HOST_WIDE_INT cst;
7770 cst = tree_low_cst (and1, 1);
7771 cst &= (HOST_WIDE_INT) -1
7772 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7773 change = (cst == 0);
7774 #ifdef LOAD_EXTEND_OP
7775 if (change
7776 && !flag_syntax_only
7777 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7778 == ZERO_EXTEND))
7780 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7781 and0 = fold_convert (uns, and0);
7782 and1 = fold_convert (uns, and1);
7784 #endif
7786 if (change)
7788 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7789 TREE_INT_CST_HIGH (and1), 0,
7790 TREE_OVERFLOW (and1));
7791 return fold_build2 (BIT_AND_EXPR, type,
7792 fold_convert (type, and0), tem);
7796 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7797 T2 being pointers to types of the same size. */
7798 if (POINTER_TYPE_P (type)
7799 && BINARY_CLASS_P (arg0)
7800 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7801 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7803 tree arg00 = TREE_OPERAND (arg0, 0);
7804 tree t0 = type;
7805 tree t1 = TREE_TYPE (arg00);
7806 tree tt0 = TREE_TYPE (t0);
7807 tree tt1 = TREE_TYPE (t1);
7808 tree s0 = TYPE_SIZE (tt0);
7809 tree s1 = TYPE_SIZE (tt1);
7811 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7812 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7813 TREE_OPERAND (arg0, 1));
7816 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7817 of the same precision, and X is an integer type not narrower than
7818 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
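/* E.g. for unsigned int X, (unsigned int) ~(int) x folds to ~x: both
casts preserve the 32-bit precision, so the complement commutes with
them. */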
7819 if (INTEGRAL_TYPE_P (type)
7820 && TREE_CODE (op0) == BIT_NOT_EXPR
7821 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7822 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7823 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7824 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7826 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7827 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7828 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7829 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7832 tem = fold_convert_const (code, type, arg0);
7833 return tem ? tem : NULL_TREE;
7835 case VIEW_CONVERT_EXPR:
7836 if (TREE_TYPE (op0) == type)
7837 return op0;
7838 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7839 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7840 return fold_view_convert_expr (type, op0);
7842 case NEGATE_EXPR:
7843 tem = fold_negate_expr (arg0);
7844 if (tem)
7845 return fold_convert (type, tem);
7846 return NULL_TREE;
7848 case ABS_EXPR:
7849 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7850 return fold_abs_const (arg0, type);
7851 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7852 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7853 /* Convert fabs((double)float) into (double)fabsf(float). */
7854 else if (TREE_CODE (arg0) == NOP_EXPR
7855 && TREE_CODE (type) == REAL_TYPE)
7857 tree targ0 = strip_float_extensions (arg0);
7858 if (targ0 != arg0)
7859 return fold_convert (type, fold_build1 (ABS_EXPR,
7860 TREE_TYPE (targ0),
7861 targ0));
7863 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7864 else if (TREE_CODE (arg0) == ABS_EXPR)
7865 return arg0;
7866 else if (tree_expr_nonnegative_p (arg0))
7867 return arg0;
7869 /* Strip sign ops from argument. */
7870 if (TREE_CODE (type) == REAL_TYPE)
7872 tem = fold_strip_sign_ops (arg0);
7873 if (tem)
7874 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7876 return NULL_TREE;
7878 case CONJ_EXPR:
7879 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7880 return fold_convert (type, arg0);
7881 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7883 tree itype = TREE_TYPE (type);
7884 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7885 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7886 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7888 if (TREE_CODE (arg0) == COMPLEX_CST)
7890 tree itype = TREE_TYPE (type);
7891 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7892 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7893 return build_complex (type, rpart, negate_expr (ipart));
7895 if (TREE_CODE (arg0) == CONJ_EXPR)
7896 return fold_convert (type, TREE_OPERAND (arg0, 0));
7897 return NULL_TREE;
7899 case BIT_NOT_EXPR:
7900 if (TREE_CODE (arg0) == INTEGER_CST)
7901 return fold_not_const (arg0, type);
7902 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7903 return TREE_OPERAND (arg0, 0);
7904 /* Convert ~ (-A) to A - 1. */
7905 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7906 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7907 build_int_cst (type, 1));
7908 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7909 else if (INTEGRAL_TYPE_P (type)
7910 && ((TREE_CODE (arg0) == MINUS_EXPR
7911 && integer_onep (TREE_OPERAND (arg0, 1)))
7912 || (TREE_CODE (arg0) == PLUS_EXPR
7913 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7914 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7915 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7916 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7917 && (tem = fold_unary (BIT_NOT_EXPR, type,
7918 fold_convert (type,
7919 TREE_OPERAND (arg0, 0)))))
7920 return fold_build2 (BIT_XOR_EXPR, type, tem,
7921 fold_convert (type, TREE_OPERAND (arg0, 1)));
7922 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7923 && (tem = fold_unary (BIT_NOT_EXPR, type,
7924 fold_convert (type,
7925 TREE_OPERAND (arg0, 1)))))
7926 return fold_build2 (BIT_XOR_EXPR, type,
7927 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7929 return NULL_TREE;
7931 case TRUTH_NOT_EXPR:
7932 /* The argument to invert_truthvalue must have Boolean type. */
7933 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7934 arg0 = fold_convert (boolean_type_node, arg0);
7936 /* Note that the operand of this must be an int
7937 and its values must be 0 or 1.
7938 ("true" is a fixed value perhaps depending on the language,
7939 but we don't handle values other than 1 correctly yet.) */
7940 tem = fold_truth_not_expr (arg0);
7941 if (!tem)
7942 return NULL_TREE;
7943 return fold_convert (type, tem);
7945 case REALPART_EXPR:
7946 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7947 return fold_convert (type, arg0);
7948 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7949 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7950 TREE_OPERAND (arg0, 1));
7951 if (TREE_CODE (arg0) == COMPLEX_CST)
7952 return fold_convert (type, TREE_REALPART (arg0));
7953 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7955 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7956 tem = fold_build2 (TREE_CODE (arg0), itype,
7957 fold_build1 (REALPART_EXPR, itype,
7958 TREE_OPERAND (arg0, 0)),
7959 fold_build1 (REALPART_EXPR, itype,
7960 TREE_OPERAND (arg0, 1)));
7961 return fold_convert (type, tem);
7963 if (TREE_CODE (arg0) == CONJ_EXPR)
7965 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7966 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7967 return fold_convert (type, tem);
7969 if (TREE_CODE (arg0) == CALL_EXPR)
7971 tree fn = get_callee_fndecl (arg0);
7972 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7973 switch (DECL_FUNCTION_CODE (fn))
7975 CASE_FLT_FN (BUILT_IN_CEXPI):
7976 fn = mathfn_built_in (type, BUILT_IN_COS);
7977 if (fn)
7978 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
7979 break;
7981 default:
7982 break;
7985 return NULL_TREE;
7987 case IMAGPART_EXPR:
7988 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7989 return fold_convert (type, integer_zero_node);
7990 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7991 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7992 TREE_OPERAND (arg0, 0));
7993 if (TREE_CODE (arg0) == COMPLEX_CST)
7994 return fold_convert (type, TREE_IMAGPART (arg0));
7995 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7997 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7998 tem = fold_build2 (TREE_CODE (arg0), itype,
7999 fold_build1 (IMAGPART_EXPR, itype,
8000 TREE_OPERAND (arg0, 0)),
8001 fold_build1 (IMAGPART_EXPR, itype,
8002 TREE_OPERAND (arg0, 1)));
8003 return fold_convert (type, tem);
8005 if (TREE_CODE (arg0) == CONJ_EXPR)
8007 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8008 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8009 return fold_convert (type, negate_expr (tem));
8011 if (TREE_CODE (arg0) == CALL_EXPR)
8013 tree fn = get_callee_fndecl (arg0);
8014 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8015 switch (DECL_FUNCTION_CODE (fn))
8017 CASE_FLT_FN (BUILT_IN_CEXPI):
8018 fn = mathfn_built_in (type, BUILT_IN_SIN);
8019 if (fn)
8020 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8021 break;
8023 default:
8024 break;
8027 return NULL_TREE;
8029 default:
8030 return NULL_TREE;
8031 } /* switch (code) */
8034 /* Fold a binary expression of code CODE and type TYPE with operands
8035 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8036 Return the folded expression if folding is successful. Otherwise,
8037 return NULL_TREE. */
8039 static tree
8040 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8042 enum tree_code compl_code;
8044 if (code == MIN_EXPR)
8045 compl_code = MAX_EXPR;
8046 else if (code == MAX_EXPR)
8047 compl_code = MIN_EXPR;
8048 else
8049 gcc_unreachable ();
8051 /* MIN (MAX (a, b), b) == b. */
8052 if (TREE_CODE (op0) == compl_code
8053 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8054 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8056 /* MIN (MAX (b, a), b) == b. */
8057 if (TREE_CODE (op0) == compl_code
8058 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8059 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8060 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8062 /* MIN (a, MAX (a, b)) == a. */
8063 if (TREE_CODE (op1) == compl_code
8064 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8065 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8066 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8068 /* MIN (a, MAX (b, a)) == a. */
8069 if (TREE_CODE (op1) == compl_code
8070 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8071 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8072 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8074 return NULL_TREE;
8077 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8078 by changing CODE to reduce the magnitude of constants involved in
8079 ARG0 of the comparison.
8080 Returns a canonicalized comparison tree if a simplification was
8081 possible, otherwise returns NULL_TREE.
8082 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8083 valid if signed overflow is undefined. */
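/* A worked instance: A - 5 < B becomes A - 4 <= B, shrinking the
constant's magnitude from 5 to 4, and the constant-only form 5 <= B
becomes 4 < B, which is then swapped to B > 4. Both steps assume the
signed arithmetic cannot wrap. */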
8085 static tree
8086 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8087 tree arg0, tree arg1,
8088 bool *strict_overflow_p)
8090 enum tree_code code0 = TREE_CODE (arg0);
8091 tree t, cst0 = NULL_TREE;
8092 int sgn0;
8093 bool swap = false;
8095 /* Match A +- CST code arg1 and CST code arg1. */
8096 if (!(((code0 == MINUS_EXPR
8097 || code0 == PLUS_EXPR)
8098 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8099 || code0 == INTEGER_CST))
8100 return NULL_TREE;
8102 /* Identify the constant in arg0 and its sign. */
8103 if (code0 == INTEGER_CST)
8104 cst0 = arg0;
8105 else
8106 cst0 = TREE_OPERAND (arg0, 1);
8107 sgn0 = tree_int_cst_sgn (cst0);
8109 /* Overflowed constants and zero will cause problems. */
8110 if (integer_zerop (cst0)
8111 || TREE_OVERFLOW (cst0))
8112 return NULL_TREE;
8114 /* See if we can reduce the magnitude of the constant in
8115 arg0 by changing the comparison code. */
8116 if (code0 == INTEGER_CST)
8118 /* CST <= arg1 -> CST-1 < arg1. */
8119 if (code == LE_EXPR && sgn0 == 1)
8120 code = LT_EXPR;
8121 /* -CST < arg1 -> -CST-1 <= arg1. */
8122 else if (code == LT_EXPR && sgn0 == -1)
8123 code = LE_EXPR;
8124 /* CST > arg1 -> CST-1 >= arg1. */
8125 else if (code == GT_EXPR && sgn0 == 1)
8126 code = GE_EXPR;
8127 /* -CST >= arg1 -> -CST-1 > arg1. */
8128 else if (code == GE_EXPR && sgn0 == -1)
8129 code = GT_EXPR;
8130 else
8131 return NULL_TREE;
8132 /* arg1 code' CST' might be more canonical. */
8133 swap = true;
8135 else
8137 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8138 if (code == LT_EXPR
8139 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8140 code = LE_EXPR;
8141 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8142 else if (code == GT_EXPR
8143 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8144 code = GE_EXPR;
8145 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8146 else if (code == LE_EXPR
8147 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8148 code = LT_EXPR;
8149 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8150 else if (code == GE_EXPR
8151 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8152 code = GT_EXPR;
8153 else
8154 return NULL_TREE;
8155 *strict_overflow_p = true;
8158 /* Now build the constant reduced in magnitude. */
8159 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8160 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8161 if (code0 != INTEGER_CST)
8162 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8164 /* If swapping might yield a more canonical form, do so. */
8165 if (swap)
8166 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8167 else
8168 return fold_build2 (code, type, t, arg1);
8171 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8172 overflow further. Try to decrease the magnitude of constants involved
8173 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8174 and put sole constants at the second argument position.
8175 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8177 static tree
8178 maybe_canonicalize_comparison (enum tree_code code, tree type,
8179 tree arg0, tree arg1)
8181 tree t;
8182 bool strict_overflow_p;
8183 const char * const warnmsg = G_("assuming signed overflow does not occur "
8184 "when reducing constant in comparison");
8186 /* In principle pointers also have undefined overflow behavior,
8187 but that causes problems elsewhere. */
8188 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8189 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8190 return NULL_TREE;
8192 /* Try canonicalization by simplifying arg0. */
8193 strict_overflow_p = false;
8194 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8195 &strict_overflow_p);
8196 if (t)
8198 if (strict_overflow_p)
8199 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8200 return t;
8203 /* Try canonicalization by simplifying arg1 using the swapped
8204 comparison. */
8205 code = swap_tree_comparison (code);
8206 strict_overflow_p = false;
8207 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8208 &strict_overflow_p);
8209 if (t && strict_overflow_p)
8210 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8211 return t;
8214 /* Subroutine of fold_binary. This routine performs all of the
8215 transformations that are common to the equality/inequality
8216 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8217 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8218 fold_binary itself should use fold_binary instead. Fold a comparison with
8219 tree code CODE and type TYPE with operands OP0 and OP1. Return
8220 the folded comparison or NULL_TREE. */
8222 static tree
8223 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8225 tree arg0, arg1, tem;
8227 arg0 = op0;
8228 arg1 = op1;
8230 STRIP_SIGN_NOPS (arg0);
8231 STRIP_SIGN_NOPS (arg1);
8233 tem = fold_relational_const (code, type, arg0, arg1);
8234 if (tem != NULL_TREE)
8235 return tem;
8237 /* If one arg is a real or integer constant, put it last. */
8238 if (tree_swap_operands_p (arg0, arg1, true))
8239 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8241 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
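/* E.g. X + 10 < 30 becomes X < 20, so the constant arithmetic is done
at compile time; the overflow handling below keeps the transformation
sound. */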
8242 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8243 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8244 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8245 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8246 && (TREE_CODE (arg1) == INTEGER_CST
8247 && !TREE_OVERFLOW (arg1)))
8249 tree const1 = TREE_OPERAND (arg0, 1);
8250 tree const2 = arg1;
8251 tree variable = TREE_OPERAND (arg0, 0);
8252 tree lhs;
8253 int lhs_add;
8254 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8256 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8257 TREE_TYPE (arg1), const2, const1);
8259 /* If the constant operation overflowed this can be
8260 simplified as a comparison against INT_MAX/INT_MIN. */
8261 if (TREE_CODE (lhs) == INTEGER_CST
8262 && TREE_OVERFLOW (lhs))
8264 int const1_sgn = tree_int_cst_sgn (const1);
8265 enum tree_code code2 = code;
8267 /* Get the sign of the constant on the lhs if the
8268 operation were VARIABLE + CONST1. */
8269 if (TREE_CODE (arg0) == MINUS_EXPR)
8270 const1_sgn = -const1_sgn;
8272 /* The sign of the constant determines if we overflowed
8273 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8274 Canonicalize to the INT_MIN overflow by swapping the comparison
8275 if necessary. */
8276 if (const1_sgn == -1)
8277 code2 = swap_tree_comparison (code);
8279 /* We now can look at the canonicalized case
8280 VARIABLE + 1 CODE2 INT_MIN
8281 and decide on the result. */
8282 if (code2 == LT_EXPR
8283 || code2 == LE_EXPR
8284 || code2 == EQ_EXPR)
8285 return omit_one_operand (type, boolean_false_node, variable);
8286 else if (code2 == NE_EXPR
8287 || code2 == GE_EXPR
8288 || code2 == GT_EXPR)
8289 return omit_one_operand (type, boolean_true_node, variable);
8292 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8293 && (TREE_CODE (lhs) != INTEGER_CST
8294 || !TREE_OVERFLOW (lhs)))
8296 fold_overflow_warning (("assuming signed overflow does not occur "
8297 "when changing X +- C1 cmp C2 to "
8298 "X cmp C1 +- C2"),
8299 WARN_STRICT_OVERFLOW_COMPARISON);
8300 return fold_build2 (code, type, variable, lhs);
8304 /* For comparisons of pointers we can decompose them into a compile-time
8305 comparison of the base objects and the offsets into the object.
8306 This requires at least one operand being an ADDR_EXPR to do more
8307 than the operand_equal_p test below. */
8308 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8309 && (TREE_CODE (arg0) == ADDR_EXPR
8310 || TREE_CODE (arg1) == ADDR_EXPR))
8312 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8313 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8314 enum machine_mode mode;
8315 int volatilep, unsignedp;
8316 bool indirect_base0 = false;
8318 /* Get base and offset for the access. Strip ADDR_EXPR for
8319 get_inner_reference, but put it back by stripping INDIRECT_REF
8320 off the base object if possible. */
8321 base0 = arg0;
8322 if (TREE_CODE (arg0) == ADDR_EXPR)
8324 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8325 &bitsize, &bitpos0, &offset0, &mode,
8326 &unsignedp, &volatilep, false);
8327 if (TREE_CODE (base0) == INDIRECT_REF)
8328 base0 = TREE_OPERAND (base0, 0);
8329 else
8330 indirect_base0 = true;
8333 base1 = arg1;
8334 if (TREE_CODE (arg1) == ADDR_EXPR)
8336 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8337 &bitsize, &bitpos1, &offset1, &mode,
8338 &unsignedp, &volatilep, false);
8339 /* We have to make sure to have an indirect/non-indirect base1
8340 just the same as we did for base0. */
8341 if (TREE_CODE (base1) == INDIRECT_REF
8342 && !indirect_base0)
8343 base1 = TREE_OPERAND (base1, 0);
8344 else if (!indirect_base0)
8345 base1 = NULL_TREE;
8347 else if (indirect_base0)
8348 base1 = NULL_TREE;
8350 /* If we have equivalent bases we might be able to simplify. */
8351 if (base0 && base1
8352 && operand_equal_p (base0, base1, 0))
8354 /* We can fold this expression to a constant if the non-constant
8355 offset parts are equal. */
8356 if (offset0 == offset1
8357 || (offset0 && offset1
8358 && operand_equal_p (offset0, offset1, 0)))
8360 switch (code)
8362 case EQ_EXPR:
8363 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8364 case NE_EXPR:
8365 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8366 case LT_EXPR:
8367 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8368 case LE_EXPR:
8369 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8370 case GE_EXPR:
8371 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8372 case GT_EXPR:
8373 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8374 default:;
8377 /* We can simplify the comparison to a comparison of the variable
8378 offset parts if the constant offset parts are equal.
8379 Be careful to use signed size type here because otherwise we
8380 mess with array offsets in the wrong way. This is possible
8381 because pointer arithmetic is restricted to remain within an
8382 object and overflow on pointer differences is undefined as of
8383 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8384 else if (bitpos0 == bitpos1)
8386 tree signed_size_type_node;
8387 signed_size_type_node = signed_type_for (size_type_node);
8389 /* By converting to signed size type we cover middle-end pointer
8390 arithmetic which operates on unsigned pointer types of size
8391 type size and ARRAY_REF offsets which are properly sign or
8392 zero extended from their type in case it is narrower than
8393 size type. */
8394 if (offset0 == NULL_TREE)
8395 offset0 = build_int_cst (signed_size_type_node, 0);
8396 else
8397 offset0 = fold_convert (signed_size_type_node, offset0);
8398 if (offset1 == NULL_TREE)
8399 offset1 = build_int_cst (signed_size_type_node, 0);
8400 else
8401 offset1 = fold_convert (signed_size_type_node, offset1);
8403 return fold_build2 (code, type, offset0, offset1);
8408 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8409 same object, then we can fold this to a comparison of the two offsets in
8410 signed size type. This is possible because pointer arithmetic is
8411 restricted to remain within an object and overflow on pointer differences
8412 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8414 We check flag_wrapv directly because pointer types are unsigned,
8415 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8416 normally what we want to avoid certain odd overflow cases, but
8417 not here. */
8418 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8419 && !flag_wrapv
8420 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8422 tree base0, offset0, base1, offset1;
8424 if (extract_array_ref (arg0, &base0, &offset0)
8425 && extract_array_ref (arg1, &base1, &offset1)
8426 && operand_equal_p (base0, base1, 0))
8428 tree signed_size_type_node;
8429 signed_size_type_node = signed_type_for (size_type_node);
8431 /* By converting to signed size type we cover middle-end pointer
8432 arithmetic which operates on unsigned pointer types of size
8433 type size and ARRAY_REF offsets which are properly sign or
8434 zero extended from their type in case it is narrower than
8435 size type. */
8436 if (offset0 == NULL_TREE)
8437 offset0 = build_int_cst (signed_size_type_node, 0);
8438 else
8439 offset0 = fold_convert (signed_size_type_node, offset0);
8440 if (offset1 == NULL_TREE)
8441 offset1 = build_int_cst (signed_size_type_node, 0);
8442 else
8443 offset1 = fold_convert (signed_size_type_node, offset1);
8445 return fold_build2 (code, type, offset0, offset1);
8449 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8450 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8451 the resulting offset is smaller in absolute value than the
8452 original one. */
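/* E.g. X + 2 < Y + 10 combines the constants into X < Y + 8, whose
offset 8 has a smaller magnitude than the original 10. */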
8453 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8454 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8455 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8456 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8457 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8458 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8459 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8461 tree const1 = TREE_OPERAND (arg0, 1);
8462 tree const2 = TREE_OPERAND (arg1, 1);
8463 tree variable1 = TREE_OPERAND (arg0, 0);
8464 tree variable2 = TREE_OPERAND (arg1, 0);
8465 tree cst;
8466 const char * const warnmsg = G_("assuming signed overflow does not "
8467 "occur when combining constants around "
8468 "a comparison");
8470 /* Put the constant on the side where it doesn't overflow and is
8471 of lower absolute value than before. */
8472 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8473 ? MINUS_EXPR : PLUS_EXPR,
8474 const2, const1, 0);
8475 if (!TREE_OVERFLOW (cst)
8476 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8478 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8479 return fold_build2 (code, type,
8480 variable1,
8481 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8482 variable2, cst));
8485 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8486 ? MINUS_EXPR : PLUS_EXPR,
8487 const1, const2, 0);
8488 if (!TREE_OVERFLOW (cst)
8489 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8491 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8492 return fold_build2 (code, type,
8493 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8494 variable1, cst),
8495 variable2);
8499 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8500 signed arithmetic case. That form is created by the compiler
8501 often enough for folding it to be of value. One example is in
8502 computing loop trip counts after Operator Strength Reduction. */
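/* E.g. X * 4 > 0 becomes X > 0, and X * -2 > 0 becomes X < 0, since a
negative multiplier flips the sense of the comparison. */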
8503 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8504 && TREE_CODE (arg0) == MULT_EXPR
8505 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8506 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8507 && integer_zerop (arg1))
8509 tree const1 = TREE_OPERAND (arg0, 1);
8510 tree const2 = arg1; /* zero */
8511 tree variable1 = TREE_OPERAND (arg0, 0);
8512 enum tree_code cmp_code = code;
8514 gcc_assert (!integer_zerop (const1));
8516 fold_overflow_warning (("assuming signed overflow does not occur when "
8517 "eliminating multiplication in comparison "
8518 "with zero"),
8519 WARN_STRICT_OVERFLOW_COMPARISON);
8521 /* If const1 is negative we swap the sense of the comparison. */
8522 if (tree_int_cst_sgn (const1) < 0)
8523 cmp_code = swap_tree_comparison (cmp_code);
8525 return fold_build2 (cmp_code, type, variable1, const2);
8528 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8529 if (tem)
8530 return tem;
8532 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8534 tree targ0 = strip_float_extensions (arg0);
8535 tree targ1 = strip_float_extensions (arg1);
8536 tree newtype = TREE_TYPE (targ0);
8538 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8539 newtype = TREE_TYPE (targ1);
8541 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8542 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8543 return fold_build2 (code, type, fold_convert (newtype, targ0),
8544 fold_convert (newtype, targ1));
8546 /* (-a) CMP (-b) -> b CMP a */
8547 if (TREE_CODE (arg0) == NEGATE_EXPR
8548 && TREE_CODE (arg1) == NEGATE_EXPR)
8549 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8550 TREE_OPERAND (arg0, 0));
8552 if (TREE_CODE (arg1) == REAL_CST)
8554 REAL_VALUE_TYPE cst;
8555 cst = TREE_REAL_CST (arg1);
8557 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8558 if (TREE_CODE (arg0) == NEGATE_EXPR)
8559 return fold_build2 (swap_tree_comparison (code), type,
8560 TREE_OPERAND (arg0, 0),
8561 build_real (TREE_TYPE (arg1),
8562 REAL_VALUE_NEGATE (cst)));
8564 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8565 /* a CMP (-0) -> a CMP 0 */
8566 if (REAL_VALUE_MINUS_ZERO (cst))
8567 return fold_build2 (code, type, arg0,
8568 build_real (TREE_TYPE (arg1), dconst0));
8570 /* x != NaN is always true, other ops are always false. */
8571 if (REAL_VALUE_ISNAN (cst)
8572 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8574 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8575 return omit_one_operand (type, tem, arg0);
8578 /* Fold comparisons against infinity. */
8579 if (REAL_VALUE_ISINF (cst))
8581 tem = fold_inf_compare (code, type, arg0, arg1);
8582 if (tem != NULL_TREE)
8583 return tem;
8587 /* If this is a comparison of a real constant with a PLUS_EXPR
8588 or a MINUS_EXPR of a real constant, we can convert it into a
8589 comparison with a revised real constant as long as no overflow
8590 occurs when unsafe_math_optimizations are enabled. */
8591 if (flag_unsafe_math_optimizations
8592 && TREE_CODE (arg1) == REAL_CST
8593 && (TREE_CODE (arg0) == PLUS_EXPR
8594 || TREE_CODE (arg0) == MINUS_EXPR)
8595 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8596 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8597 ? MINUS_EXPR : PLUS_EXPR,
8598 arg1, TREE_OPERAND (arg0, 1), 0))
8599 && !TREE_OVERFLOW (tem))
8600 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8602 /* Likewise, we can simplify a comparison of a real constant with
8603 a MINUS_EXPR whose first operand is also a real constant, i.e.
8604 (c1 - x) < c2 becomes x > c1-c2. */
8605 if (flag_unsafe_math_optimizations
8606 && TREE_CODE (arg1) == REAL_CST
8607 && TREE_CODE (arg0) == MINUS_EXPR
8608 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8609 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8610 arg1, 0))
8611 && !TREE_OVERFLOW (tem))
8612 return fold_build2 (swap_tree_comparison (code), type,
8613 TREE_OPERAND (arg0, 1), tem);
8615 /* Fold comparisons against built-in math functions. */
8616 if (TREE_CODE (arg1) == REAL_CST
8617 && flag_unsafe_math_optimizations
8618 && ! flag_errno_math)
8620 enum built_in_function fcode = builtin_mathfn_code (arg0);
8622 if (fcode != END_BUILTINS)
8624 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8625 if (tem != NULL_TREE)
8626 return tem;
8631 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8632 if (TREE_CONSTANT (arg1)
8633 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8634 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8635 /* This optimization is invalid for ordered comparisons
8636 if CONST+INCR overflows or if foo+incr might overflow.
8637 This optimization is invalid for floating point due to rounding.
8638 For pointer types we assume overflow doesn't happen. */
8639 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8640 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8641 && (code == EQ_EXPR || code == NE_EXPR))))
8643 tree varop, newconst;
8645 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8647 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8648 arg1, TREE_OPERAND (arg0, 1));
8649 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8650 TREE_OPERAND (arg0, 0),
8651 TREE_OPERAND (arg0, 1));
8653 else
8655 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8656 arg1, TREE_OPERAND (arg0, 1));
8657 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8658 TREE_OPERAND (arg0, 0),
8659 TREE_OPERAND (arg0, 1));
8663 /* If VAROP is a reference to a bitfield, we must mask
8664 the constant by the width of the field. */
8665 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8666 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8667 && host_integerp (DECL_SIZE (TREE_OPERAND
8668 (TREE_OPERAND (varop, 0), 1)), 1))
8670 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8671 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8672 tree folded_compare, shift;
8674 /* First check whether the comparison would always come out
8675 the same. If we skipped this check, the masking below
8676 could change the meaning of the comparison. */
8677 folded_compare = fold_build2 (code, type,
8678 TREE_OPERAND (varop, 0), arg1);
8679 if (TREE_CODE (folded_compare) == INTEGER_CST)
8680 return omit_one_operand (type, folded_compare, varop);
8682 shift = build_int_cst (NULL_TREE,
8683 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8684 shift = fold_convert (TREE_TYPE (varop), shift);
8685 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8686 newconst, shift);
8687 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8688 newconst, shift);
8691 return fold_build2 (code, type, varop, newconst);
8694 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8695 && (TREE_CODE (arg0) == NOP_EXPR
8696 || TREE_CODE (arg0) == CONVERT_EXPR))
8698 /* If we are widening one operand of an integer comparison,
8699 see if the other operand is similarly being widened. Perhaps we
8700 can do the comparison in the narrower type. */
8701 tem = fold_widened_comparison (code, type, arg0, arg1);
8702 if (tem)
8703 return tem;
8705 /* Or if we are changing signedness. */
8706 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8707 if (tem)
8708 return tem;
8711 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8712 constant, we can simplify it. */
8713 if (TREE_CODE (arg1) == INTEGER_CST
8714 && (TREE_CODE (arg0) == MIN_EXPR
8715 || TREE_CODE (arg0) == MAX_EXPR)
8716 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8718 tem = optimize_minmax_comparison (code, type, op0, op1);
8719 if (tem)
8720 return tem;
8723 /* Simplify comparison of something with itself. (For IEEE
8724 floating-point, we can only do some of these simplifications.) */
8725 if (operand_equal_p (arg0, arg1, 0))
8727 switch (code)
8729 case EQ_EXPR:
8730 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8731 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8732 return constant_boolean_node (1, type);
8733 break;
8735 case GE_EXPR:
8736 case LE_EXPR:
8737 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8738 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8739 return constant_boolean_node (1, type);
8740 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8742 case NE_EXPR:
8743 /* For NE, we can only do this simplification if integer
8744 or we don't honor IEEE floating point NaNs. */
8745 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8746 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8747 break;
8748 /* ... fall through ... */
8749 case GT_EXPR:
8750 case LT_EXPR:
8751 return constant_boolean_node (0, type);
8752 default:
8753 gcc_unreachable ();
8757 /* If we are comparing an expression that just has comparisons
8758 of two integer values, arithmetic expressions of those comparisons,
8759 and constants, we can simplify it. There are only three cases
8760 to check: the two values can either be equal, the first can be
8761 greater, or the second can be greater. Fold the expression for
8762 those three values. Since each value must be 0 or 1, we have
8763 eight possibilities, each of which corresponds to the constant 0
8764 or 1 or one of the six possible comparisons.
8766 This handles common cases like (a > b) == 0 but also handles
8767 expressions like ((x > y) - (y > x)) > 0, which supposedly
8768 occur in macroized code. */
8770 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8772 tree cval1 = 0, cval2 = 0;
8773 int save_p = 0;
8775 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8776 /* Don't handle degenerate cases here; they should already
8777 have been handled anyway. */
8778 && cval1 != 0 && cval2 != 0
8779 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8780 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8781 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8782 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8783 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8784 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8785 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8787 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8788 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8790 /* We can't just pass T to eval_subst in case cval1 or cval2
8791 was the same as ARG1. */
8793 tree high_result
8794 = fold_build2 (code, type,
8795 eval_subst (arg0, cval1, maxval,
8796 cval2, minval),
8797 arg1);
8798 tree equal_result
8799 = fold_build2 (code, type,
8800 eval_subst (arg0, cval1, maxval,
8801 cval2, maxval),
8802 arg1);
8803 tree low_result
8804 = fold_build2 (code, type,
8805 eval_subst (arg0, cval1, minval,
8806 cval2, maxval),
8807 arg1);
8809 /* All three of these results should be 0 or 1. Confirm they are.
8810 Then use those values to select the proper code to use. */
8812 if (TREE_CODE (high_result) == INTEGER_CST
8813 && TREE_CODE (equal_result) == INTEGER_CST
8814 && TREE_CODE (low_result) == INTEGER_CST)
8816 /* Make a 3-bit mask with the high-order bit being the
8817 value for `>', the next for '=', and the low for '<'. */
8818 switch ((integer_onep (high_result) * 4)
8819 + (integer_onep (equal_result) * 2)
8820 + integer_onep (low_result))
8822 case 0:
8823 /* Always false. */
8824 return omit_one_operand (type, integer_zero_node, arg0);
8825 case 1:
8826 code = LT_EXPR;
8827 break;
8828 case 2:
8829 code = EQ_EXPR;
8830 break;
8831 case 3:
8832 code = LE_EXPR;
8833 break;
8834 case 4:
8835 code = GT_EXPR;
8836 break;
8837 case 5:
8838 code = NE_EXPR;
8839 break;
8840 case 6:
8841 code = GE_EXPR;
8842 break;
8843 case 7:
8844 /* Always true. */
8845 return omit_one_operand (type, integer_one_node, arg0);
8848 if (save_p)
8849 return save_expr (build2 (code, type, cval1, cval2));
8850 return fold_build2 (code, type, cval1, cval2);
8855 /* If this is a comparison of complex values and both sides
8856 are COMPLEX_CST, do the comparison by parts to fold the
8857 comparison. */
8858 if ((code == EQ_EXPR || code == NE_EXPR)
8859 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8860 && TREE_CODE (arg0) == COMPLEX_CST
8861 && TREE_CODE (arg1) == COMPLEX_CST)
8863 tree real0, imag0, real1, imag1;
8864 enum tree_code outercode;
8866 real0 = TREE_REALPART (arg0);
8867 imag0 = TREE_IMAGPART (arg0);
8868 real1 = TREE_REALPART (arg1);
8869 imag1 = TREE_IMAGPART (arg1);
8870 outercode = code == EQ_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8872 return fold_build2 (outercode, type,
8873 fold_build2 (code, type, real0, real1),
8874 fold_build2 (code, type, imag0, imag1));
8878 /* Fold a comparison of the address of COMPONENT_REFs with the same
8879 type and component to a comparison of the address of the base
8880 object. In short, &x->a OP &y->a to x OP y and
8881 &x->a OP &y.a to x OP &y */
8882 if (TREE_CODE (arg0) == ADDR_EXPR
8883 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8884 && TREE_CODE (arg1) == ADDR_EXPR
8885 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8887 tree cref0 = TREE_OPERAND (arg0, 0);
8888 tree cref1 = TREE_OPERAND (arg1, 0);
8889 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8891 tree op0 = TREE_OPERAND (cref0, 0);
8892 tree op1 = TREE_OPERAND (cref1, 0);
8893 return fold_build2 (code, type,
8894 build_fold_addr_expr (op0),
8895 build_fold_addr_expr (op1));
8899 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8900 into a single range test. */
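/* E.g. with truncating division, X / 3 == 2 holds exactly when
6 <= X && X <= 8, which fold_div_compare can express as a single range
check (roughly, an unsigned comparison against X - 6). */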
8901 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8902 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8903 && TREE_CODE (arg1) == INTEGER_CST
8904 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8905 && !integer_zerop (TREE_OPERAND (arg0, 1))
8906 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8907 && !TREE_OVERFLOW (arg1))
8909 tem = fold_div_compare (code, type, arg0, arg1);
8910 if (tem != NULL_TREE)
8911 return tem;
8914 /* Fold ~X op ~Y as Y op X. */
8915 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8916 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8917 return fold_build2 (code, type,
8918 TREE_OPERAND (arg1, 0),
8919 TREE_OPERAND (arg0, 0));
8921 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8922 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8923 && TREE_CODE (arg1) == INTEGER_CST)
8924 return fold_build2 (swap_tree_comparison (code), type,
8925 TREE_OPERAND (arg0, 0),
8926 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8928 return NULL_TREE;
8932 /* Subroutine of fold_binary. Optimize complex multiplications of the
8933 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8934 argument EXPR represents the expression "z" of type TYPE. */
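/* That is, for z = a + bi, z * conj(z) = (a + bi)(a - bi) = a*a + b*b,
with a zero imaginary part; the COMPLEX_EXPR built below encodes
exactly that. */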
8936 static tree
8937 fold_mult_zconjz (tree type, tree expr)
8939 tree itype = TREE_TYPE (type);
8940 tree rpart, ipart, tem;
8942 if (TREE_CODE (expr) == COMPLEX_EXPR)
8944 rpart = TREE_OPERAND (expr, 0);
8945 ipart = TREE_OPERAND (expr, 1);
8947 else if (TREE_CODE (expr) == COMPLEX_CST)
8949 rpart = TREE_REALPART (expr);
8950 ipart = TREE_IMAGPART (expr);
8952 else
8954 expr = save_expr (expr);
8955 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8956 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8959 rpart = save_expr (rpart);
8960 ipart = save_expr (ipart);
8961 tem = fold_build2 (PLUS_EXPR, itype,
8962 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8963 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8964 return fold_build2 (COMPLEX_EXPR, type, tem,
8965 fold_convert (itype, integer_zero_node));
8969 /* Fold a binary expression of code CODE and type TYPE with operands
8970 OP0 and OP1. Return the folded expression if folding is
8971 successful. Otherwise, return NULL_TREE. */
8973 tree
8974 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8976 enum tree_code_class kind = TREE_CODE_CLASS (code);
8977 tree arg0, arg1, tem;
8978 tree t1 = NULL_TREE;
8979 bool strict_overflow_p;
8981 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8982 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8983 && TREE_CODE_LENGTH (code) == 2
8984 && op0 != NULL_TREE
8985 && op1 != NULL_TREE);
8987 arg0 = op0;
8988 arg1 = op1;
8990 /* Strip any conversions that don't change the mode. This is
8991 safe for every expression, except for a comparison expression
8992 because its signedness is derived from its operands. So, in
8993 the latter case, only strip conversions that don't change the
8994 signedness.
8996 Note that this is done as an internal manipulation within the
8997 constant folder, in order to find the simplest representation
8998 of the arguments so that their form can be studied. In any
8999 case, the appropriate type conversions should be put back in
9000 the tree that will get out of the constant folder. */
9002 if (kind == tcc_comparison)
9004 STRIP_SIGN_NOPS (arg0);
9005 STRIP_SIGN_NOPS (arg1);
9007 else
9009 STRIP_NOPS (arg0);
9010 STRIP_NOPS (arg1);
9013 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9014 constant but we can't do arithmetic on them. */
9015 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9016 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9017 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9018 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9020 if (kind == tcc_binary)
9021 tem = const_binop (code, arg0, arg1, 0);
9022 else if (kind == tcc_comparison)
9023 tem = fold_relational_const (code, type, arg0, arg1);
9024 else
9025 tem = NULL_TREE;
9027 if (tem != NULL_TREE)
9029 if (TREE_TYPE (tem) != type)
9030 tem = fold_convert (type, tem);
9031 return tem;
9035 /* If this is a commutative operation, and ARG0 is a constant, move it
9036 to ARG1 to reduce the number of tests below. */
9037 if (commutative_tree_code (code)
9038 && tree_swap_operands_p (arg0, arg1, true))
9039 return fold_build2 (code, type, op1, op0);
9041 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9043 First check for cases where an arithmetic operation is applied to a
9044 compound, conditional, or comparison operation. Push the arithmetic
9045 operation inside the compound or conditional to see if any folding
9046 can then be done. Convert comparison to conditional for this purpose.
9048 This also optimizes non-constant cases that used to be done in
9048 expand_expr.
9050 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9051 where one of the operands is a truth value and the other is a truth
9052 value or a BIT_AND_EXPR with the constant 1. In that case, the
9053 code below would make the expression more complex. Change it to a
9054 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9055 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9057 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9058 || code == EQ_EXPR || code == NE_EXPR)
9059 && ((truth_value_p (TREE_CODE (arg0))
9060 && (truth_value_p (TREE_CODE (arg1))
9061 || (TREE_CODE (arg1) == BIT_AND_EXPR
9062 && integer_onep (TREE_OPERAND (arg1, 1)))))
9063 || (truth_value_p (TREE_CODE (arg1))
9064 && (truth_value_p (TREE_CODE (arg0))
9065 || (TREE_CODE (arg0) == BIT_AND_EXPR
9066 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9068 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9069 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9070 : TRUTH_XOR_EXPR,
9071 boolean_type_node,
9072 fold_convert (boolean_type_node, arg0),
9073 fold_convert (boolean_type_node, arg1));
9075 if (code == EQ_EXPR)
9076 tem = invert_truthvalue (tem);
9078 return fold_convert (type, tem);
9081 if (TREE_CODE_CLASS (code) == tcc_binary
9082 || TREE_CODE_CLASS (code) == tcc_comparison)
9084 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9085 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9086 fold_build2 (code, type,
9087 TREE_OPERAND (arg0, 1), op1));
9088 if (TREE_CODE (arg1) == COMPOUND_EXPR
9089 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9090 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9091 fold_build2 (code, type,
9092 op0, TREE_OPERAND (arg1, 1)));
9094 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9096 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9097 arg0, arg1,
9098 /*cond_first_p=*/1);
9099 if (tem != NULL_TREE)
9100 return tem;
9103 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9105 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9106 arg1, arg0,
9107 /*cond_first_p=*/0);
9108 if (tem != NULL_TREE)
9109 return tem;
9113 switch (code)
9115 case PLUS_EXPR:
9116 /* A + (-B) -> A - B */
9117 if (TREE_CODE (arg1) == NEGATE_EXPR)
9118 return fold_build2 (MINUS_EXPR, type,
9119 fold_convert (type, arg0),
9120 fold_convert (type, TREE_OPERAND (arg1, 0)));
9121 /* (-A) + B -> B - A */
9122 if (TREE_CODE (arg0) == NEGATE_EXPR
9123 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9124 return fold_build2 (MINUS_EXPR, type,
9125 fold_convert (type, arg1),
9126 fold_convert (type, TREE_OPERAND (arg0, 0)));
9127 /* Convert ~A + 1 to -A. */
9128 if (INTEGRAL_TYPE_P (type)
9129 && TREE_CODE (arg0) == BIT_NOT_EXPR
9130 && integer_onep (arg1))
9131 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
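/* Illustrative example (editorial note): by the two's-complement
   identity -x == ~x + 1, an expression such as ~n + 1 folds directly
   to -n in any integral type.  */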
9133 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9134 same or one. */
9135 if ((TREE_CODE (arg0) == MULT_EXPR
9136 || TREE_CODE (arg1) == MULT_EXPR)
9137 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9139 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9140 if (tem)
9141 return tem;
9144 if (! FLOAT_TYPE_P (type))
9146 if (integer_zerop (arg1))
9147 return non_lvalue (fold_convert (type, arg0));
9149 /* ~X + X is -1. */
9150 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9151 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9152 && !TYPE_OVERFLOW_TRAPS (type))
9154 t1 = build_int_cst_type (type, -1);
9155 return omit_one_operand (type, t1, arg1);
9158 /* X + ~X is -1. */
9159 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9160 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9161 && !TYPE_OVERFLOW_TRAPS (type))
9163 t1 = build_int_cst_type (type, -1);
9164 return omit_one_operand (type, t1, arg0);
9167 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9168 with a constant, and the two constants have no bits in common,
9169 we should treat this as a BIT_IOR_EXPR since this may produce more
9170 simplifications. */
9171 if (TREE_CODE (arg0) == BIT_AND_EXPR
9172 && TREE_CODE (arg1) == BIT_AND_EXPR
9173 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9174 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9175 && integer_zerop (const_binop (BIT_AND_EXPR,
9176 TREE_OPERAND (arg0, 1),
9177 TREE_OPERAND (arg1, 1), 0)))
9179 code = BIT_IOR_EXPR;
9180 goto bit_ior;
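/* Illustrative example (editorial note): since 0x0F & 0xF0 == 0,
   an addition such as (a & 0x0F) + (b & 0xF0) can never carry, so it
   is retried as (a & 0x0F) | (b & 0xF0) via the goto bit_ior above.  */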
9183 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9184 (plus (plus (mult) (mult)) (foo)) so that we can
9185 take advantage of the factoring cases below. */
9186 if (((TREE_CODE (arg0) == PLUS_EXPR
9187 || TREE_CODE (arg0) == MINUS_EXPR)
9188 && TREE_CODE (arg1) == MULT_EXPR)
9189 || ((TREE_CODE (arg1) == PLUS_EXPR
9190 || TREE_CODE (arg1) == MINUS_EXPR)
9191 && TREE_CODE (arg0) == MULT_EXPR))
9193 tree parg0, parg1, parg, marg;
9194 enum tree_code pcode;
9196 if (TREE_CODE (arg1) == MULT_EXPR)
9197 parg = arg0, marg = arg1;
9198 else
9199 parg = arg1, marg = arg0;
9200 pcode = TREE_CODE (parg);
9201 parg0 = TREE_OPERAND (parg, 0);
9202 parg1 = TREE_OPERAND (parg, 1);
9203 STRIP_NOPS (parg0);
9204 STRIP_NOPS (parg1);
9206 if (TREE_CODE (parg0) == MULT_EXPR
9207 && TREE_CODE (parg1) != MULT_EXPR)
9208 return fold_build2 (pcode, type,
9209 fold_build2 (PLUS_EXPR, type,
9210 fold_convert (type, parg0),
9211 fold_convert (type, marg)),
9212 fold_convert (type, parg1));
9213 if (TREE_CODE (parg0) != MULT_EXPR
9214 && TREE_CODE (parg1) == MULT_EXPR)
9215 return fold_build2 (PLUS_EXPR, type,
9216 fold_convert (type, parg0),
9217 fold_build2 (pcode, type,
9218 fold_convert (type, marg),
9219 fold_convert (type,
9220 parg1)));
9223 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
9224 of the array. The loop optimizer sometimes produces this type of
9225 expression. */
9226 if (TREE_CODE (arg0) == ADDR_EXPR)
9228 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
9229 if (tem)
9230 return fold_convert (type, tem);
9232 else if (TREE_CODE (arg1) == ADDR_EXPR)
9234 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
9235 if (tem)
9236 return fold_convert (type, tem);
9239 else
9241 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9242 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9243 return non_lvalue (fold_convert (type, arg0));
9245 /* Likewise if the operands are reversed. */
9246 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9247 return non_lvalue (fold_convert (type, arg1));
9249 /* Convert X + -C into X - C. */
9250 if (TREE_CODE (arg1) == REAL_CST
9251 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9253 tem = fold_negate_const (arg1, type);
9254 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9255 return fold_build2 (MINUS_EXPR, type,
9256 fold_convert (type, arg0),
9257 fold_convert (type, tem));
9260 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9261 to __complex__ ( x, y ). This is not the same for SNaNs or
9262 if signed zeros are involved. */
9263 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9264 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9265 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9267 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9268 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9269 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9270 bool arg0rz = false, arg0iz = false;
9271 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9272 || (arg0i && (arg0iz = real_zerop (arg0i))))
9274 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9275 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9276 if (arg0rz && arg1i && real_zerop (arg1i))
9278 tree rp = arg1r ? arg1r
9279 : build1 (REALPART_EXPR, rtype, arg1);
9280 tree ip = arg0i ? arg0i
9281 : build1 (IMAGPART_EXPR, rtype, arg0);
9282 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9284 else if (arg0iz && arg1r && real_zerop (arg1r))
9286 tree rp = arg0r ? arg0r
9287 : build1 (REALPART_EXPR, rtype, arg0);
9288 tree ip = arg1i ? arg1i
9289 : build1 (IMAGPART_EXPR, rtype, arg1);
9290 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
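/* Illustrative example (editorial note): __complex__ (x, 0.0)
   + __complex__ (0.0, y) folds to __complex__ (x, y). The guard
   matters because, e.g., 0.0 + -0.0 is +0.0, so dropping the
   addition would be wrong when signed zeros are honored.  */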
9295 if (flag_unsafe_math_optimizations
9296 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9297 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9298 && (tem = distribute_real_division (code, type, arg0, arg1)))
9299 return tem;
9301 /* Convert x+x into x*2.0. */
9302 if (operand_equal_p (arg0, arg1, 0)
9303 && SCALAR_FLOAT_TYPE_P (type))
9304 return fold_build2 (MULT_EXPR, type, arg0,
9305 build_real (type, dconst2));
9307 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9308 if (flag_unsafe_math_optimizations
9309 && TREE_CODE (arg1) == PLUS_EXPR
9310 && TREE_CODE (arg0) != MULT_EXPR)
9312 tree tree10 = TREE_OPERAND (arg1, 0);
9313 tree tree11 = TREE_OPERAND (arg1, 1);
9314 if (TREE_CODE (tree11) == MULT_EXPR
9315 && TREE_CODE (tree10) == MULT_EXPR)
9317 tree tree0;
9318 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9319 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9322 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
9323 if (flag_unsafe_math_optimizations
9324 && TREE_CODE (arg0) == PLUS_EXPR
9325 && TREE_CODE (arg1) != MULT_EXPR)
9327 tree tree00 = TREE_OPERAND (arg0, 0);
9328 tree tree01 = TREE_OPERAND (arg0, 1);
9329 if (TREE_CODE (tree01) == MULT_EXPR
9330 && TREE_CODE (tree00) == MULT_EXPR)
9332 tree tree0;
9333 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9334 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9339 bit_rotate:
9340 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9341 is a rotate of A by C1 bits. */
9342 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9343 is a rotate of A by B bits. */
9345 enum tree_code code0, code1;
9346 code0 = TREE_CODE (arg0);
9347 code1 = TREE_CODE (arg1);
9348 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9349 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9350 && operand_equal_p (TREE_OPERAND (arg0, 0),
9351 TREE_OPERAND (arg1, 0), 0)
9352 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9354 tree tree01, tree11;
9355 enum tree_code code01, code11;
9357 tree01 = TREE_OPERAND (arg0, 1);
9358 tree11 = TREE_OPERAND (arg1, 1);
9359 STRIP_NOPS (tree01);
9360 STRIP_NOPS (tree11);
9361 code01 = TREE_CODE (tree01);
9362 code11 = TREE_CODE (tree11);
9363 if (code01 == INTEGER_CST
9364 && code11 == INTEGER_CST
9365 && TREE_INT_CST_HIGH (tree01) == 0
9366 && TREE_INT_CST_HIGH (tree11) == 0
9367 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9368 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9369 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9370 code0 == LSHIFT_EXPR ? tree01 : tree11);
9371 else if (code11 == MINUS_EXPR)
9373 tree tree110, tree111;
9374 tree110 = TREE_OPERAND (tree11, 0);
9375 tree111 = TREE_OPERAND (tree11, 1);
9376 STRIP_NOPS (tree110);
9377 STRIP_NOPS (tree111);
9378 if (TREE_CODE (tree110) == INTEGER_CST
9379 && 0 == compare_tree_int (tree110,
9380 TYPE_PRECISION
9381 (TREE_TYPE (TREE_OPERAND
9382 (arg0, 0))))
9383 && operand_equal_p (tree01, tree111, 0))
9384 return build2 ((code0 == LSHIFT_EXPR
9385 ? LROTATE_EXPR
9386 : RROTATE_EXPR),
9387 type, TREE_OPERAND (arg0, 0), tree01);
9389 else if (code01 == MINUS_EXPR)
9391 tree tree010, tree011;
9392 tree010 = TREE_OPERAND (tree01, 0);
9393 tree011 = TREE_OPERAND (tree01, 1);
9394 STRIP_NOPS (tree010);
9395 STRIP_NOPS (tree011);
9396 if (TREE_CODE (tree010) == INTEGER_CST
9397 && 0 == compare_tree_int (tree010,
9398 TYPE_PRECISION
9399 (TREE_TYPE (TREE_OPERAND
9400 (arg0, 0))))
9401 && operand_equal_p (tree11, tree011, 0))
9402 return build2 ((code0 != LSHIFT_EXPR
9403 ? LROTATE_EXPR
9404 : RROTATE_EXPR),
9405 type, TREE_OPERAND (arg0, 0), tree11);
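/* Illustrative example (editorial note, assuming a 32-bit unsigned x):
   (x << 3) + (x >> 29) satisfies C1 + C2 == 32 and becomes
   LROTATE_EXPR <x, 3>; likewise (x << n) + (x >> (32 - n)) matches
   the MINUS_EXPR patterns above and becomes a rotate by n.  */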
9410 associate:
9411 /* In most languages, we can't associate operations on floats through
9412 parentheses. Rather than remember where the parentheses were, we
9413 don't associate floats at all, unless the user has specified
9414 -funsafe-math-optimizations. */
9416 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9418 tree var0, con0, lit0, minus_lit0;
9419 tree var1, con1, lit1, minus_lit1;
9421 /* Split both trees into variables, constants, and literals. Then
9422 associate each group together, the constants with literals,
9423 then the result with variables. This increases the chances of
9424 literals being recombined later and of generating relocatable
9425 expressions for the sum of a constant and literal. */
9426 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9427 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9428 code == MINUS_EXPR);
9430 /* Only do something if we found more than two objects. Otherwise,
9431 nothing has changed and we risk infinite recursion. */
9432 if (2 < ((var0 != 0) + (var1 != 0)
9433 + (con0 != 0) + (con1 != 0)
9434 + (lit0 != 0) + (lit1 != 0)
9435 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9437 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9438 if (code == MINUS_EXPR)
9439 code = PLUS_EXPR;
9441 var0 = associate_trees (var0, var1, code, type);
9442 con0 = associate_trees (con0, con1, code, type);
9443 lit0 = associate_trees (lit0, lit1, code, type);
9444 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9446 /* Preserve the MINUS_EXPR if the negative part of the literal is
9447 greater than the positive part. Otherwise, the multiplicative
9448 folding code (i.e. extract_muldiv) may be fooled when
9449 unsigned constants are subtracted, as in the following
9450 example: ((X*2 + 4) - 8U)/2. */
9451 if (minus_lit0 && lit0)
9453 if (TREE_CODE (lit0) == INTEGER_CST
9454 && TREE_CODE (minus_lit0) == INTEGER_CST
9455 && tree_int_cst_lt (lit0, minus_lit0))
9457 minus_lit0 = associate_trees (minus_lit0, lit0,
9458 MINUS_EXPR, type);
9459 lit0 = 0;
9461 else
9463 lit0 = associate_trees (lit0, minus_lit0,
9464 MINUS_EXPR, type);
9465 minus_lit0 = 0;
9468 if (minus_lit0)
9470 if (con0 == 0)
9471 return fold_convert (type,
9472 associate_trees (var0, minus_lit0,
9473 MINUS_EXPR, type));
9474 else
9476 con0 = associate_trees (con0, minus_lit0,
9477 MINUS_EXPR, type);
9478 return fold_convert (type,
9479 associate_trees (var0, con0,
9480 PLUS_EXPR, type));
9484 con0 = associate_trees (con0, lit0, code, type);
9485 return fold_convert (type, associate_trees (var0, con0,
9486 code, type));
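/* Illustrative example (editorial note): splitting and re-associating
   turns (x + 1) + (y + 2) into (x + y) + 3 -- four objects are found,
   so the two literals are combined first and the variables last.  */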
9490 return NULL_TREE;
9492 case MINUS_EXPR:
9493 /* A - (-B) -> A + B */
9494 if (TREE_CODE (arg1) == NEGATE_EXPR)
9495 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9496 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9497 if (TREE_CODE (arg0) == NEGATE_EXPR
9498 && (FLOAT_TYPE_P (type)
9499 || INTEGRAL_TYPE_P (type))
9500 && negate_expr_p (arg1)
9501 && reorder_operands_p (arg0, arg1))
9502 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9503 TREE_OPERAND (arg0, 0));
9504 /* Convert -A - 1 to ~A. */
9505 if (INTEGRAL_TYPE_P (type)
9506 && TREE_CODE (arg0) == NEGATE_EXPR
9507 && integer_onep (arg1)
9508 && !TYPE_OVERFLOW_TRAPS (type))
9509 return fold_build1 (BIT_NOT_EXPR, type,
9510 fold_convert (type, TREE_OPERAND (arg0, 0)));
9512 /* Convert -1 - A to ~A. */
9513 if (INTEGRAL_TYPE_P (type)
9514 && integer_all_onesp (arg0))
9515 return fold_build1 (BIT_NOT_EXPR, type, op1);
9517 if (! FLOAT_TYPE_P (type))
9519 if (integer_zerop (arg0))
9520 return negate_expr (fold_convert (type, arg1));
9521 if (integer_zerop (arg1))
9522 return non_lvalue (fold_convert (type, arg0));
9524 /* Fold A - (A & B) into ~B & A. */
9525 if (!TREE_SIDE_EFFECTS (arg0)
9526 && TREE_CODE (arg1) == BIT_AND_EXPR)
9528 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9529 return fold_build2 (BIT_AND_EXPR, type,
9530 fold_build1 (BIT_NOT_EXPR, type,
9531 TREE_OPERAND (arg1, 0)),
9532 arg0);
9533 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9534 return fold_build2 (BIT_AND_EXPR, type,
9535 fold_build1 (BIT_NOT_EXPR, type,
9536 TREE_OPERAND (arg1, 1)),
9537 arg0);
9540 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9541 any power of 2 minus 1. */
9542 if (TREE_CODE (arg0) == BIT_AND_EXPR
9543 && TREE_CODE (arg1) == BIT_AND_EXPR
9544 && operand_equal_p (TREE_OPERAND (arg0, 0),
9545 TREE_OPERAND (arg1, 0), 0))
9547 tree mask0 = TREE_OPERAND (arg0, 1);
9548 tree mask1 = TREE_OPERAND (arg1, 1);
9549 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9551 if (operand_equal_p (tem, mask1, 0))
9553 tem = fold_build2 (BIT_XOR_EXPR, type,
9554 TREE_OPERAND (arg0, 0), mask1);
9555 return fold_build2 (MINUS_EXPR, type, tem, mask1);
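/* Illustrative example (editorial note, B == 7, i.e. 2**3 - 1):
   (a & ~7) - (a & 7) becomes (a ^ 7) - 7; e.g. for a == 5 both
   forms yield -5.  */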
9560 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9561 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9562 return non_lvalue (fold_convert (type, arg0));
9564 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9565 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9566 (-ARG1 + ARG0) reduces to -ARG1. */
9567 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9568 return negate_expr (fold_convert (type, arg1));
9570 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9571 __complex__ ( x, -y ). This is not the same for SNaNs or if
9572 signed zeros are involved. */
9573 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9574 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9575 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9577 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9578 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9579 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9580 bool arg0rz = false, arg0iz = false;
9581 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9582 || (arg0i && (arg0iz = real_zerop (arg0i))))
9584 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9585 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9586 if (arg0rz && arg1i && real_zerop (arg1i))
9588 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9589 arg1r ? arg1r
9590 : build1 (REALPART_EXPR, rtype, arg1));
9591 tree ip = arg0i ? arg0i
9592 : build1 (IMAGPART_EXPR, rtype, arg0);
9593 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9595 else if (arg0iz && arg1r && real_zerop (arg1r))
9597 tree rp = arg0r ? arg0r
9598 : build1 (REALPART_EXPR, rtype, arg0);
9599 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9600 arg1i ? arg1i
9601 : build1 (IMAGPART_EXPR, rtype, arg1));
9602 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9607 /* Fold &x - &x. This can happen from &x.foo - &x.
9608 This is unsafe for certain floats even in non-IEEE formats.
9609 In IEEE, it is unsafe because it does wrong for NaNs.
9610 Also note that operand_equal_p is always false if an operand
9611 is volatile. */
9613 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9614 && operand_equal_p (arg0, arg1, 0))
9615 return fold_convert (type, integer_zero_node);
9617 /* A - B -> A + (-B) if B is easily negatable. */
9618 if (negate_expr_p (arg1)
9619 && ((FLOAT_TYPE_P (type)
9620 /* Avoid this transformation if B is a positive REAL_CST. */
9621 && (TREE_CODE (arg1) != REAL_CST
9622 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9623 || INTEGRAL_TYPE_P (type)))
9624 return fold_build2 (PLUS_EXPR, type,
9625 fold_convert (type, arg0),
9626 fold_convert (type, negate_expr (arg1)));
9628 /* Try folding difference of addresses. */
9630 HOST_WIDE_INT diff;
9632 if ((TREE_CODE (arg0) == ADDR_EXPR
9633 || TREE_CODE (arg1) == ADDR_EXPR)
9634 && ptr_difference_const (arg0, arg1, &diff))
9635 return build_int_cst_type (type, diff);
9638 /* Fold &a[i] - &a[j] to i-j. */
9639 if (TREE_CODE (arg0) == ADDR_EXPR
9640 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9641 && TREE_CODE (arg1) == ADDR_EXPR
9642 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9644 tree aref0 = TREE_OPERAND (arg0, 0);
9645 tree aref1 = TREE_OPERAND (arg1, 0);
9646 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9647 TREE_OPERAND (aref1, 0), 0))
9649 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9650 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9651 tree esz = array_ref_element_size (aref0);
9652 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9653 return fold_build2 (MULT_EXPR, type, diff,
9654 fold_convert (type, esz));
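/* Illustrative example (editorial note, assuming 4-byte elements):
   at this level &a[5] - &a[2] is the byte difference of the two
   addresses, so it folds to (5 - 2) * 4; the surrounding division by
   the element size, typically generated by the front end for pointer
   subtraction, then recovers the C-level result 3.  */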
9659 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9660 of the array. The loop optimizer sometimes produces this type of
9661 expression. */
9662 if (TREE_CODE (arg0) == ADDR_EXPR)
9664 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9665 if (tem)
9666 return fold_convert (type, tem);
9669 if (flag_unsafe_math_optimizations
9670 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9671 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9672 && (tem = distribute_real_division (code, type, arg0, arg1)))
9673 return tem;
9675 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9676 same or one. */
9677 if ((TREE_CODE (arg0) == MULT_EXPR
9678 || TREE_CODE (arg1) == MULT_EXPR)
9679 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9681 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9682 if (tem)
9683 return tem;
9686 goto associate;
9688 case MULT_EXPR:
9689 /* (-A) * (-B) -> A * B */
9690 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9691 return fold_build2 (MULT_EXPR, type,
9692 fold_convert (type, TREE_OPERAND (arg0, 0)),
9693 fold_convert (type, negate_expr (arg1)));
9694 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9695 return fold_build2 (MULT_EXPR, type,
9696 fold_convert (type, negate_expr (arg0)),
9697 fold_convert (type, TREE_OPERAND (arg1, 0)));
9699 if (! FLOAT_TYPE_P (type))
9701 if (integer_zerop (arg1))
9702 return omit_one_operand (type, arg1, arg0);
9703 if (integer_onep (arg1))
9704 return non_lvalue (fold_convert (type, arg0));
9705 /* Transform x * -1 into -x. */
9706 if (integer_all_onesp (arg1))
9707 return fold_convert (type, negate_expr (arg0));
9708 /* Transform x * -C into -x * C if x is easily negatable. */
9709 if (TREE_CODE (arg1) == INTEGER_CST
9710 && tree_int_cst_sgn (arg1) == -1
9711 && negate_expr_p (arg0)
9712 && (tem = negate_expr (arg1)) != arg1
9713 && !TREE_OVERFLOW (tem))
9714 return fold_build2 (MULT_EXPR, type,
9715 negate_expr (arg0), tem);
9717 /* (a * (1 << b)) is (a << b) */
9718 if (TREE_CODE (arg1) == LSHIFT_EXPR
9719 && integer_onep (TREE_OPERAND (arg1, 0)))
9720 return fold_build2 (LSHIFT_EXPR, type, arg0,
9721 TREE_OPERAND (arg1, 1));
9722 if (TREE_CODE (arg0) == LSHIFT_EXPR
9723 && integer_onep (TREE_OPERAND (arg0, 0)))
9724 return fold_build2 (LSHIFT_EXPR, type, arg1,
9725 TREE_OPERAND (arg0, 1));
9727 strict_overflow_p = false;
9728 if (TREE_CODE (arg1) == INTEGER_CST
9729 && 0 != (tem = extract_muldiv (op0,
9730 fold_convert (type, arg1),
9731 code, NULL_TREE,
9732 &strict_overflow_p)))
9734 if (strict_overflow_p)
9735 fold_overflow_warning (("assuming signed overflow does not "
9736 "occur when simplifying "
9737 "multiplication"),
9738 WARN_STRICT_OVERFLOW_MISC);
9739 return fold_convert (type, tem);
9742 /* Optimize z * conj(z) for integer complex numbers. */
9743 if (TREE_CODE (arg0) == CONJ_EXPR
9744 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9745 return fold_mult_zconjz (type, arg1);
9746 if (TREE_CODE (arg1) == CONJ_EXPR
9747 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9748 return fold_mult_zconjz (type, arg0);
9750 else
9752 /* Maybe fold x * 0 to 0. The expressions aren't the same
9753 when x is NaN, since x * 0 is also NaN. Nor are they the
9754 same in modes with signed zeros, since multiplying a
9755 negative value by 0 gives -0, not +0. */
9756 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9757 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9758 && real_zerop (arg1))
9759 return omit_one_operand (type, arg1, arg0);
9760 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9761 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9762 && real_onep (arg1))
9763 return non_lvalue (fold_convert (type, arg0));
9765 /* Transform x * -1.0 into -x. */
9766 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9767 && real_minus_onep (arg1))
9768 return fold_convert (type, negate_expr (arg0));
9770 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9771 if (flag_unsafe_math_optimizations
9772 && TREE_CODE (arg0) == RDIV_EXPR
9773 && TREE_CODE (arg1) == REAL_CST
9774 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9776 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9777 arg1, 0);
9778 if (tem)
9779 return fold_build2 (RDIV_EXPR, type, tem,
9780 TREE_OPERAND (arg0, 1));
9783 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9784 if (operand_equal_p (arg0, arg1, 0))
9786 tree tem = fold_strip_sign_ops (arg0);
9787 if (tem != NULL_TREE)
9789 tem = fold_convert (type, tem);
9790 return fold_build2 (MULT_EXPR, type, tem, tem);
9794 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9795 This is not the same for NaNs or if signed zeros are
9796 involved. */
9797 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9798 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9799 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9800 && TREE_CODE (arg1) == COMPLEX_CST
9801 && real_zerop (TREE_REALPART (arg1)))
9803 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9804 if (real_onep (TREE_IMAGPART (arg1)))
9805 return fold_build2 (COMPLEX_EXPR, type,
9806 negate_expr (fold_build1 (IMAGPART_EXPR,
9807 rtype, arg0)),
9808 fold_build1 (REALPART_EXPR, rtype, arg0));
9809 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9810 return fold_build2 (COMPLEX_EXPR, type,
9811 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9812 negate_expr (fold_build1 (REALPART_EXPR,
9813 rtype, arg0)));
9816 /* Optimize z * conj(z) for floating point complex numbers.
9817 Guarded by flag_unsafe_math_optimizations as non-finite
9818 imaginary components don't produce scalar results. */
9819 if (flag_unsafe_math_optimizations
9820 && TREE_CODE (arg0) == CONJ_EXPR
9821 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9822 return fold_mult_zconjz (type, arg1);
9823 if (flag_unsafe_math_optimizations
9824 && TREE_CODE (arg1) == CONJ_EXPR
9825 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9826 return fold_mult_zconjz (type, arg0);
9828 if (flag_unsafe_math_optimizations)
9830 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9831 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9833 /* Optimizations of root(...)*root(...). */
9834 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9836 tree rootfn, arg;
9837 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9838 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9840 /* Optimize sqrt(x)*sqrt(x) as x. */
9841 if (BUILTIN_SQRT_P (fcode0)
9842 && operand_equal_p (arg00, arg10, 0)
9843 && ! HONOR_SNANS (TYPE_MODE (type)))
9844 return arg00;
9846 /* Optimize root(x)*root(y) as root(x*y). */
9847 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9848 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9849 return build_call_expr (rootfn, 1, arg);
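/* Illustrative example (editorial note, -funsafe-math-optimizations):
   sqrt(x) * sqrt(y) is rebuilt as sqrt(x * y); sqrt(x) * sqrt(x)
   collapses to plain x via the special case just above.  */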
9852 /* Optimize expN(x)*expN(y) as expN(x+y). */
9853 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9855 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9856 tree arg = fold_build2 (PLUS_EXPR, type,
9857 CALL_EXPR_ARG (arg0, 0),
9858 CALL_EXPR_ARG (arg1, 0));
9859 return build_call_expr (expfn, 1, arg);
9862 /* Optimizations of pow(...)*pow(...). */
9863 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9864 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9865 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9867 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9868 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9869 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9870 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9872 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9873 if (operand_equal_p (arg01, arg11, 0))
9875 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9876 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9877 return build_call_expr (powfn, 2, arg, arg01);
9880 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9881 if (operand_equal_p (arg00, arg10, 0))
9883 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9884 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9885 return build_call_expr (powfn, 2, arg00, arg);
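/* Illustrative examples (editorial note): pow(x, y) * pow(z, y)
   becomes pow(x * z, y), and pow(x, 2.0) * pow(x, 3.0) becomes
   pow(x, 2.0 + 3.0) == pow(x, 5.0).  */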
9889 /* Optimize tan(x)*cos(x) as sin(x). */
9890 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9891 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9892 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9893 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9894 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9895 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9896 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
9897 CALL_EXPR_ARG (arg1, 0), 0))
9899 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9901 if (sinfn != NULL_TREE)
9902 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
9905 /* Optimize x*pow(x,c) as pow(x,c+1). */
9906 if (fcode1 == BUILT_IN_POW
9907 || fcode1 == BUILT_IN_POWF
9908 || fcode1 == BUILT_IN_POWL)
9910 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9911 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9912 if (TREE_CODE (arg11) == REAL_CST
9913 && !TREE_OVERFLOW (arg11)
9914 && operand_equal_p (arg0, arg10, 0))
9916 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
9917 REAL_VALUE_TYPE c;
9918 tree arg;
9920 c = TREE_REAL_CST (arg11);
9921 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9922 arg = build_real (type, c);
9923 return build_call_expr (powfn, 2, arg0, arg);
9927 /* Optimize pow(x,c)*x as pow(x,c+1). */
9928 if (fcode0 == BUILT_IN_POW
9929 || fcode0 == BUILT_IN_POWF
9930 || fcode0 == BUILT_IN_POWL)
9932 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9933 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9934 if (TREE_CODE (arg01) == REAL_CST
9935 && !TREE_OVERFLOW (arg01)
9936 && operand_equal_p (arg1, arg00, 0))
9938 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9939 REAL_VALUE_TYPE c;
9940 tree arg;
9942 c = TREE_REAL_CST (arg01);
9943 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9944 arg = build_real (type, c);
9945 return build_call_expr (powfn, 2, arg1, arg);
9949 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9950 if (! optimize_size
9951 && operand_equal_p (arg0, arg1, 0))
9953 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9955 if (powfn)
9957 tree arg = build_real (type, dconst2);
9958 return build_call_expr (powfn, 2, arg0, arg);
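/* Editorial note: rewriting x*x as pow(x, 2.0) is a canonicalization
   only -- as the comment above says, pow(x, 2.0) is expanded back to
   x*x -- but the pow form lets the pow-combining folds above apply.  */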
9963 goto associate;
9965 case BIT_IOR_EXPR:
9966 bit_ior:
9967 if (integer_all_onesp (arg1))
9968 return omit_one_operand (type, arg1, arg0);
9969 if (integer_zerop (arg1))
9970 return non_lvalue (fold_convert (type, arg0));
9971 if (operand_equal_p (arg0, arg1, 0))
9972 return non_lvalue (fold_convert (type, arg0));
9974 /* ~X | X is -1. */
9975 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9976 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9978 t1 = build_int_cst_type (type, -1);
9979 return omit_one_operand (type, t1, arg1);
9982 /* X | ~X is -1. */
9983 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9984 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9986 t1 = build_int_cst_type (type, -1);
9987 return omit_one_operand (type, t1, arg0);
9990 /* Canonicalize (X & C1) | C2. */
9991 if (TREE_CODE (arg0) == BIT_AND_EXPR
9992 && TREE_CODE (arg1) == INTEGER_CST
9993 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9995 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9996 int width = TYPE_PRECISION (type);
9997 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9998 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9999 hi2 = TREE_INT_CST_HIGH (arg1);
10000 lo2 = TREE_INT_CST_LOW (arg1);
10002 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10003 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10004 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10006 if (width > HOST_BITS_PER_WIDE_INT)
10008 mhi = (unsigned HOST_WIDE_INT) -1
10009 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10010 mlo = -1;
10012 else
10014 mhi = 0;
10015 mlo = (unsigned HOST_WIDE_INT) -1
10016 >> (HOST_BITS_PER_WIDE_INT - width);
10019 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10020 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10021 return fold_build2 (BIT_IOR_EXPR, type,
10022 TREE_OPERAND (arg0, 0), arg1);
10024 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10025 hi1 &= mhi;
10026 lo1 &= mlo;
10027 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10028 return fold_build2 (BIT_IOR_EXPR, type,
10029 fold_build2 (BIT_AND_EXPR, type,
10030 TREE_OPERAND (arg0, 0),
10031 build_int_cst_wide (type,
10032 lo1 & ~lo2,
10033 hi1 & ~hi2)),
10034 arg1);
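/* Illustrative examples (editorial note): (x & 0x0C) | 0x0F folds to
   plain 0x0F, since 0x0C & 0x0F == 0x0C; and (x & 0x3F) | 0x0F is
   narrowed to (x & 0x30) | 0x0F by the C1 &= ~C2 step.  */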
10037 /* (X & Y) | Y is (X, Y). */
10038 if (TREE_CODE (arg0) == BIT_AND_EXPR
10039 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10040 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10041 /* (X & Y) | X is (Y, X). */
10042 if (TREE_CODE (arg0) == BIT_AND_EXPR
10043 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10044 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10045 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10046 /* X | (X & Y) is (Y, X). */
10047 if (TREE_CODE (arg1) == BIT_AND_EXPR
10048 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10049 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10050 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10051 /* X | (Y & X) is (Y, X). */
10052 if (TREE_CODE (arg1) == BIT_AND_EXPR
10053 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10054 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10055 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10057 t1 = distribute_bit_expr (code, type, arg0, arg1);
10058 if (t1 != NULL_TREE)
10059 return t1;
10061 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10063 This results in more efficient code for machines without a NAND
10064 instruction. Combine will canonicalize to the first form,
10065 which will allow use of NAND instructions provided by the
10066 backend if they exist. */
10067 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10068 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10070 return fold_build1 (BIT_NOT_EXPR, type,
10071 build2 (BIT_AND_EXPR, type,
10072 TREE_OPERAND (arg0, 0),
10073 TREE_OPERAND (arg1, 0)));
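/* Editorial note: the rewrite above is De Morgan's law,
   ~a | ~b == ~(a & b); keeping the single-NOT form lets combine
   select a NAND instruction where the target provides one.  */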
10076 /* See if this can be simplified into a rotate first. If that
10077 is unsuccessful, continue in the association code. */
10078 goto bit_rotate;
10080 case BIT_XOR_EXPR:
10081 if (integer_zerop (arg1))
10082 return non_lvalue (fold_convert (type, arg0));
10083 if (integer_all_onesp (arg1))
10084 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10085 if (operand_equal_p (arg0, arg1, 0))
10086 return omit_one_operand (type, integer_zero_node, arg0);
10088 /* ~X ^ X is -1. */
10089 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10090 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10092 t1 = build_int_cst_type (type, -1);
10093 return omit_one_operand (type, t1, arg1);
10096 /* X ^ ~X is -1. */
10097 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10098 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10100 t1 = build_int_cst_type (type, -1);
10101 return omit_one_operand (type, t1, arg0);
10104 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10105 with a constant, and the two constants have no bits in common,
10106 we should treat this as a BIT_IOR_EXPR since this may produce more
10107 simplifications. */
10108 if (TREE_CODE (arg0) == BIT_AND_EXPR
10109 && TREE_CODE (arg1) == BIT_AND_EXPR
10110 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10111 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10112 && integer_zerop (const_binop (BIT_AND_EXPR,
10113 TREE_OPERAND (arg0, 1),
10114 TREE_OPERAND (arg1, 1), 0)))
10116 code = BIT_IOR_EXPR;
10117 goto bit_ior;
10120 /* (X | Y) ^ X -> Y & ~X. */
10121 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10124 tree t2 = TREE_OPERAND (arg0, 1);
10125 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10126 arg1);
10127 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10128 fold_convert (type, t1));
10129 return t1;
10132 /* (Y | X) ^ X -> Y & ~X. */
10133 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10134 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10136 tree t2 = TREE_OPERAND (arg0, 0);
10137 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10138 arg1);
10139 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10140 fold_convert (type, t1));
10141 return t1;
10144 /* X ^ (X | Y) -> Y & ~X. */
10145 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10146 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10148 tree t2 = TREE_OPERAND (arg1, 1);
10149 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10150 arg0);
10151 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10152 fold_convert (type, t1));
10153 return t1;
10156 /* X ^ (Y | X) -> Y & ~X. */
10157 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10158 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10160 tree t2 = TREE_OPERAND (arg1, 0);
10161 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10162 arg0);
10163 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10164 fold_convert (type, t1));
10165 return t1;
10168 /* Convert ~X ^ ~Y to X ^ Y. */
10169 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10170 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10171 return fold_build2 (code, type,
10172 fold_convert (type, TREE_OPERAND (arg0, 0)),
10173 fold_convert (type, TREE_OPERAND (arg1, 0)));
10175 /* Convert ~X ^ C to X ^ ~C. */
10176 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10177 && TREE_CODE (arg1) == INTEGER_CST)
10178 return fold_build2 (code, type,
10179 fold_convert (type, TREE_OPERAND (arg0, 0)),
10180 fold_build1 (BIT_NOT_EXPR, type, arg1));
10182 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10183 if (TREE_CODE (arg0) == BIT_AND_EXPR
10184 && integer_onep (TREE_OPERAND (arg0, 1))
10185 && integer_onep (arg1))
10186 return fold_build2 (EQ_EXPR, type, arg0,
10187 build_int_cst (TREE_TYPE (arg0), 0));
10189 /* Fold (X & Y) ^ Y as ~X & Y. */
10190 if (TREE_CODE (arg0) == BIT_AND_EXPR
10191 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10193 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10194 return fold_build2 (BIT_AND_EXPR, type,
10195 fold_build1 (BIT_NOT_EXPR, type, tem),
10196 fold_convert (type, arg1));
10198 /* Fold (X & Y) ^ X as ~Y & X. */
10199 if (TREE_CODE (arg0) == BIT_AND_EXPR
10200 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10201 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10203 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10204 return fold_build2 (BIT_AND_EXPR, type,
10205 fold_build1 (BIT_NOT_EXPR, type, tem),
10206 fold_convert (type, arg1));
10208 /* Fold X ^ (X & Y) as X & ~Y. */
10209 if (TREE_CODE (arg1) == BIT_AND_EXPR
10210 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10212 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10213 return fold_build2 (BIT_AND_EXPR, type,
10214 fold_convert (type, arg0),
10215 fold_build1 (BIT_NOT_EXPR, type, tem));
10217 /* Fold X ^ (Y & X) as ~Y & X. */
10218 if (TREE_CODE (arg1) == BIT_AND_EXPR
10219 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10220 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10222 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10223 return fold_build2 (BIT_AND_EXPR, type,
10224 fold_build1 (BIT_NOT_EXPR, type, tem),
10225 fold_convert (type, arg0));
10228 /* See if this can be simplified into a rotate first. If that
10229 is unsuccessful, continue in the association code. */
10230 goto bit_rotate;
10232 case BIT_AND_EXPR:
10233 if (integer_all_onesp (arg1))
10234 return non_lvalue (fold_convert (type, arg0));
10235 if (integer_zerop (arg1))
10236 return omit_one_operand (type, arg1, arg0);
10237 if (operand_equal_p (arg0, arg1, 0))
10238 return non_lvalue (fold_convert (type, arg0));
10240 /* ~X & X is always zero. */
10241 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10242 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10243 return omit_one_operand (type, integer_zero_node, arg1);
10245 /* X & ~X is always zero. */
10246 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10247 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10248 return omit_one_operand (type, integer_zero_node, arg0);
10250 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10251 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10252 && TREE_CODE (arg1) == INTEGER_CST
10253 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10254 return fold_build2 (BIT_IOR_EXPR, type,
10255 fold_build2 (BIT_AND_EXPR, type,
10256 TREE_OPERAND (arg0, 0), arg1),
10257 fold_build2 (BIT_AND_EXPR, type,
10258 TREE_OPERAND (arg0, 1), arg1));
10260 /* (X | Y) & Y is (X, Y). */
10261 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10262 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10263 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10264 /* (X | Y) & X is (Y, X). */
10265 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10266 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10267 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10268 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10269 /* X & (X | Y) is (Y, X). */
10270 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10271 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10272 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10273 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10274 /* X & (Y | X) is (Y, X). */
10275 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10276 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10277 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10278 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10280 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10281 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10282 && integer_onep (TREE_OPERAND (arg0, 1))
10283 && integer_onep (arg1))
10285 tem = TREE_OPERAND (arg0, 0);
10286 return fold_build2 (EQ_EXPR, type,
10287 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10288 build_int_cst (TREE_TYPE (tem), 1)),
10289 build_int_cst (TREE_TYPE (tem), 0));
10291 /* Fold ~X & 1 as (X & 1) == 0. */
10292 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10293 && integer_onep (arg1))
10295 tem = TREE_OPERAND (arg0, 0);
10296 return fold_build2 (EQ_EXPR, type,
10297 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10298 build_int_cst (TREE_TYPE (tem), 1)),
10299 build_int_cst (TREE_TYPE (tem), 0));
10302 /* Fold (X ^ Y) & Y as ~X & Y. */
10303 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10304 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10306 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10307 return fold_build2 (BIT_AND_EXPR, type,
10308 fold_build1 (BIT_NOT_EXPR, type, tem),
10309 fold_convert (type, arg1));
10311 /* Fold (X ^ Y) & X as ~Y & X. */
10312 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10313 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10314 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10316 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10317 return fold_build2 (BIT_AND_EXPR, type,
10318 fold_build1 (BIT_NOT_EXPR, type, tem),
10319 fold_convert (type, arg1));
10321 /* Fold X & (X ^ Y) as X & ~Y. */
10322 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10323 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10325 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10326 return fold_build2 (BIT_AND_EXPR, type,
10327 fold_convert (type, arg0),
10328 fold_build1 (BIT_NOT_EXPR, type, tem));
10330 /* Fold X & (Y ^ X) as ~Y & X. */
10331 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10332 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10333 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10335 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10336 return fold_build2 (BIT_AND_EXPR, type,
10337 fold_build1 (BIT_NOT_EXPR, type, tem),
10338 fold_convert (type, arg0));
10341 t1 = distribute_bit_expr (code, type, arg0, arg1);
10342 if (t1 != NULL_TREE)
10343 return t1;
10344 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10345 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10346 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10348 unsigned int prec
10349 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10351 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10352 && (~TREE_INT_CST_LOW (arg1)
10353 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10354 return fold_convert (type, TREE_OPERAND (arg0, 0));
10357 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10359 This results in more efficient code for machines without a NOR
10360 instruction. Combine will canonicalize to the first form,
10361 which will allow use of NOR instructions provided by the
10362 backend if they exist. */
10363 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10364 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10366 return fold_build1 (BIT_NOT_EXPR, type,
10367 build2 (BIT_IOR_EXPR, type,
10368 TREE_OPERAND (arg0, 0),
10369 TREE_OPERAND (arg1, 0)));
10372 goto associate;
10374 case RDIV_EXPR:
10375 /* Don't touch a floating-point divide by zero unless the mode
10376 of the constant can represent infinity. */
10377 if (TREE_CODE (arg1) == REAL_CST
10378 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10379 && real_zerop (arg1))
10380 return NULL_TREE;
10382 /* Optimize A / A to 1.0 if we don't care about
10383 NaNs or Infinities. Skip the transformation
10384 for non-real operands. */
10385 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10386 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10387 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10388 && operand_equal_p (arg0, arg1, 0))
10390 tree r = build_real (TREE_TYPE (arg0), dconst1);
10392 return omit_two_operands (type, r, arg0, arg1);
10395 /* The complex version of the above A / A optimization. */
10396 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10397 && operand_equal_p (arg0, arg1, 0))
10399 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10400 if (! HONOR_NANS (TYPE_MODE (elem_type))
10401 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10403 tree r = build_real (elem_type, dconst1);
10404 /* omit_two_operands will call fold_convert for us. */
10405 return omit_two_operands (type, r, arg0, arg1);
10409 /* (-A) / (-B) -> A / B */
10410 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10411 return fold_build2 (RDIV_EXPR, type,
10412 TREE_OPERAND (arg0, 0),
10413 negate_expr (arg1));
10414 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10415 return fold_build2 (RDIV_EXPR, type,
10416 negate_expr (arg0),
10417 TREE_OPERAND (arg1, 0));
10419 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10420 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10421 && real_onep (arg1))
10422 return non_lvalue (fold_convert (type, arg0));
10424 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10425 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10426 && real_minus_onep (arg1))
10427 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10429 /* If ARG1 is a constant, we can convert this to a multiply by the
10430 reciprocal. This does not have the same rounding properties,
10431 so only do this if -funsafe-math-optimizations. We can actually
10432 always safely do it if ARG1 is a power of two, but it's hard to
10433 tell if it is or not in a portable manner. */
10434 if (TREE_CODE (arg1) == REAL_CST)
10436 if (flag_unsafe_math_optimizations
10437 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10438 arg1, 0)))
10439 return fold_build2 (MULT_EXPR, type, arg0, tem);
10440 /* Find the reciprocal if optimizing and the result is exact. */
10441 if (optimize)
10443 REAL_VALUE_TYPE r;
10444 r = TREE_REAL_CST (arg1);
10445 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10447 tem = build_real (type, r);
10448 return fold_build2 (MULT_EXPR, type,
10449 fold_convert (type, arg0), tem);
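/* Illustrative example (editorial note): x / 2.0 becomes x * 0.5 even
   without -funsafe-math-optimizations, because 0.5 is an exact
   reciprocal; x / 3.0 becomes x * (1.0/3.0) only under the unsafe
   flag, since that changes the rounding.  */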
10453 /* Convert A/B/C to A/(B*C). */
10454 if (flag_unsafe_math_optimizations
10455 && TREE_CODE (arg0) == RDIV_EXPR)
10456 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10457 fold_build2 (MULT_EXPR, type,
10458 TREE_OPERAND (arg0, 1), arg1));
10460 /* Convert A/(B/C) to (A/B)*C. */
10461 if (flag_unsafe_math_optimizations
10462 && TREE_CODE (arg1) == RDIV_EXPR)
10463 return fold_build2 (MULT_EXPR, type,
10464 fold_build2 (RDIV_EXPR, type, arg0,
10465 TREE_OPERAND (arg1, 0)),
10466 TREE_OPERAND (arg1, 1));
10468 /* Convert C1/(X*C2) into (C1/C2)/X. */
10469 if (flag_unsafe_math_optimizations
10470 && TREE_CODE (arg1) == MULT_EXPR
10471 && TREE_CODE (arg0) == REAL_CST
10472 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10474 tree tem = const_binop (RDIV_EXPR, arg0,
10475 TREE_OPERAND (arg1, 1), 0);
10476 if (tem)
10477 return fold_build2 (RDIV_EXPR, type, tem,
10478 TREE_OPERAND (arg1, 0));
10481 if (flag_unsafe_math_optimizations)
10483 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10484 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10486 /* Optimize sin(x)/cos(x) as tan(x). */
10487 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10488 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10489 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10490 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10491 CALL_EXPR_ARG (arg1, 0), 0))
10493 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10495 if (tanfn != NULL_TREE)
10496 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10499 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10500 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10501 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10502 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10503 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10504 CALL_EXPR_ARG (arg1, 0), 0))
10506 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10508 if (tanfn != NULL_TREE)
10510 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10511 return fold_build2 (RDIV_EXPR, type,
10512 build_real (type, dconst1), tmp);
10516 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10517 NaNs or Infinities. */
10518 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10519 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10520 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10522 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10523 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10525 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10526 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10527 && operand_equal_p (arg00, arg01, 0))
10529 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10531 if (cosfn != NULL_TREE)
10532 return build_call_expr (cosfn, 1, arg00);
10536 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10537 NaNs or Infinities. */
10538 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10539 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10540 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10542 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10543 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10545 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10546 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10547 && operand_equal_p (arg00, arg01, 0))
10549 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10551 if (cosfn != NULL_TREE)
10553 tree tmp = build_call_expr (cosfn, 1, arg00);
10554 return fold_build2 (RDIV_EXPR, type,
10555 build_real (type, dconst1),
10556 tmp);
10561 /* Optimize pow(x,c)/x as pow(x,c-1). */
10562 if (fcode0 == BUILT_IN_POW
10563 || fcode0 == BUILT_IN_POWF
10564 || fcode0 == BUILT_IN_POWL)
10566 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10567 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10568 if (TREE_CODE (arg01) == REAL_CST
10569 && !TREE_OVERFLOW (arg01)
10570 && operand_equal_p (arg1, arg00, 0))
10572 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10573 REAL_VALUE_TYPE c;
10574 tree arg;
10576 c = TREE_REAL_CST (arg01);
10577 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10578 arg = build_real (type, c);
10579 return build_call_expr (powfn, 2, arg1, arg);
10583 /* Optimize x/expN(y) into x*expN(-y). */
10584 if (BUILTIN_EXPONENT_P (fcode1))
10586 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10587 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10588 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10589 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10592 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10593 if (fcode1 == BUILT_IN_POW
10594 || fcode1 == BUILT_IN_POWF
10595 || fcode1 == BUILT_IN_POWL)
10597 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10598 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10599 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10600 tree neg11 = fold_convert (type, negate_expr (arg11));
10601 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10602 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10605 return NULL_TREE;
10607 case TRUNC_DIV_EXPR:
10608 case FLOOR_DIV_EXPR:
10609 /* Simplify A / (B << N) where A and B are positive and B is
10610 a power of 2, to A >> (N + log2(B)). */
10611 strict_overflow_p = false;
10612 if (TREE_CODE (arg1) == LSHIFT_EXPR
10613 && (TYPE_UNSIGNED (type)
10614 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10616 tree sval = TREE_OPERAND (arg1, 0);
10617 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10619 tree sh_cnt = TREE_OPERAND (arg1, 1);
10620 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10622 if (strict_overflow_p)
10623 fold_overflow_warning (("assuming signed overflow does not "
10624 "occur when simplifying A / (B << N)"),
10625 WARN_STRICT_OVERFLOW_MISC);
10627 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10628 sh_cnt, build_int_cst (NULL_TREE, pow2));
10629 return fold_build2 (RSHIFT_EXPR, type,
10630 fold_convert (type, arg0), sh_cnt);
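/* Illustrative example (editorial note): for unsigned a,
   a / (4 << n) becomes a >> (n + 2), since log2(4) == 2.  */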
10633 /* Fall through. */
10635 case ROUND_DIV_EXPR:
10636 case CEIL_DIV_EXPR:
10637 case EXACT_DIV_EXPR:
10638 if (integer_onep (arg1))
10639 return non_lvalue (fold_convert (type, arg0));
10640 if (integer_zerop (arg1))
10641 return NULL_TREE;
10642 /* X / -1 is -X. */
10643 if (!TYPE_UNSIGNED (type)
10644 && TREE_CODE (arg1) == INTEGER_CST
10645 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10646 && TREE_INT_CST_HIGH (arg1) == -1)
10647 return fold_convert (type, negate_expr (arg0));
10649 /* Convert -A / -B to A / B when the type is signed and overflow is
10650 undefined. */
10651 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10652 && TREE_CODE (arg0) == NEGATE_EXPR
10653 && negate_expr_p (arg1))
10655 if (INTEGRAL_TYPE_P (type))
10656 fold_overflow_warning (("assuming signed overflow does not occur "
10657 "when distributing negation across "
10658 "division"),
10659 WARN_STRICT_OVERFLOW_MISC);
10660 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10661 negate_expr (arg1));
10663 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10664 && TREE_CODE (arg1) == NEGATE_EXPR
10665 && negate_expr_p (arg0))
10667 if (INTEGRAL_TYPE_P (type))
10668 fold_overflow_warning (("assuming signed overflow does not occur "
10669 "when distributing negation across "
10670 "division"),
10671 WARN_STRICT_OVERFLOW_MISC);
10672 return fold_build2 (code, type, negate_expr (arg0),
10673 TREE_OPERAND (arg1, 0));
10676 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10677 operation, EXACT_DIV_EXPR.
10679 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10680 At one time others generated faster code, but it's not clear if they
10681 do after the last round of changes to the DIV code in expmed.c. */
10682 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10683 && multiple_of_p (type, arg0, arg1))
10684 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10686 strict_overflow_p = false;
10687 if (TREE_CODE (arg1) == INTEGER_CST
10688 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10689 &strict_overflow_p)))
10691 if (strict_overflow_p)
10692 fold_overflow_warning (("assuming signed overflow does not occur "
10693 "when simplifying division"),
10694 WARN_STRICT_OVERFLOW_MISC);
10695 return fold_convert (type, tem);
10698 return NULL_TREE;
10700 case CEIL_MOD_EXPR:
10701 case FLOOR_MOD_EXPR:
10702 case ROUND_MOD_EXPR:
10703 case TRUNC_MOD_EXPR:
10704 /* X % 1 is always zero, but be sure to preserve any side
10705 effects in X. */
10706 if (integer_onep (arg1))
10707 return omit_one_operand (type, integer_zero_node, arg0);
10709 /* For X % 0, return X % 0 unchanged so that we can get the
10710 proper warnings and errors. */
10711 if (integer_zerop (arg1))
10712 return NULL_TREE;
10714 /* 0 % X is always zero, but be sure to preserve any side
10715 effects in X. Place this after checking for X == 0. */
10716 if (integer_zerop (arg0))
10717 return omit_one_operand (type, integer_zero_node, arg1);
10719 /* X % -1 is zero. */
10720 if (!TYPE_UNSIGNED (type)
10721 && TREE_CODE (arg1) == INTEGER_CST
10722 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10723 && TREE_INT_CST_HIGH (arg1) == -1)
10724 return omit_one_operand (type, integer_zero_node, arg0);
10726 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10727 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10728 strict_overflow_p = false;
10729 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10730 && (TYPE_UNSIGNED (type)
10731 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10733 tree c = arg1;
10734 /* Also optimize A % (C << N) where C is a power of 2,
10735 to A & ((C << N) - 1). */
10736 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10737 c = TREE_OPERAND (arg1, 0);
10739 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10741 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10742 build_int_cst (TREE_TYPE (arg1), 1));
10743 if (strict_overflow_p)
10744 fold_overflow_warning (("assuming signed overflow does not "
10745 "occur when simplifying "
10746 "X % (power of two)"),
10747 WARN_STRICT_OVERFLOW_MISC);
10748 return fold_build2 (BIT_AND_EXPR, type,
10749 fold_convert (type, arg0),
10750 fold_convert (type, mask));
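/* Editor's sketch (illustrative, not part of the original source):
   for unsigned x,
       x % 16         folds to   x & 15, and
       x % (4 << n)   folds to   x & ((4 << n) - 1),
   the latter being the LSHIFT_EXPR form handled just above.  */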
10754 /* X % -C is the same as X % C. */
10755 if (code == TRUNC_MOD_EXPR
10756 && !TYPE_UNSIGNED (type)
10757 && TREE_CODE (arg1) == INTEGER_CST
10758 && !TREE_OVERFLOW (arg1)
10759 && TREE_INT_CST_HIGH (arg1) < 0
10760 && !TYPE_OVERFLOW_TRAPS (type)
10761 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10762 && !sign_bit_p (arg1, arg1))
10763 return fold_build2 (code, type, fold_convert (type, arg0),
10764 fold_convert (type, negate_expr (arg1)));
10766 /* X % -Y is the same as X % Y. */
10767 if (code == TRUNC_MOD_EXPR
10768 && !TYPE_UNSIGNED (type)
10769 && TREE_CODE (arg1) == NEGATE_EXPR
10770 && !TYPE_OVERFLOW_TRAPS (type))
10771 return fold_build2 (code, type, fold_convert (type, arg0),
10772 fold_convert (type, TREE_OPERAND (arg1, 0)));
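/* Editor's sketch (illustrative, not part of the original source):
   for signed x, x % -16 folds to x % 16 and x % -y to x % y;
   a truncating modulus takes its sign from the dividend, so the
   divisor's sign cannot affect the result.  */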
10774 if (TREE_CODE (arg1) == INTEGER_CST
10775 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10776 &strict_overflow_p)))
10778 if (strict_overflow_p)
10779 fold_overflow_warning (("assuming signed overflow does not occur "
10780 "when simplifying modulos"),
10781 WARN_STRICT_OVERFLOW_MISC);
10782 return fold_convert (type, tem);
10785 return NULL_TREE;
10787 case LROTATE_EXPR:
10788 case RROTATE_EXPR:
/* Rotating an all-ones value by any amount yields all ones. */
10789 if (integer_all_onesp (arg0))
10790 return omit_one_operand (type, arg0, arg1);
10791 goto shift;
10793 case RSHIFT_EXPR:
10794 /* Optimize -1 >> x for arithmetic right shifts. */
10795 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10796 return omit_one_operand (type, arg0, arg1);
10797 /* ... fall through ... */
10799 case LSHIFT_EXPR:
10800 shift:
10801 if (integer_zerop (arg1))
10802 return non_lvalue (fold_convert (type, arg0));
10803 if (integer_zerop (arg0))
10804 return omit_one_operand (type, arg0, arg1);
10806 /* Since a negative shift count is not well-defined,
10807 don't try to compute it in the compiler. */
10808 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10809 return NULL_TREE;
10811 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10812 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10813 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10814 && host_integerp (TREE_OPERAND (arg0, 1), false)
10815 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10817 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10818 + TREE_INT_CST_LOW (arg1));
10820 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10821 being well defined. */
10822 if (low >= TYPE_PRECISION (type))
10824 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10825 low = low % TYPE_PRECISION (type);
10826 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10827 return build_int_cst (type, 0);
10828 else
10829 low = TYPE_PRECISION (type) - 1;
10832 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10833 build_int_cst (type, low));
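/* Editor's sketch (illustrative, not part of the original source):
   in a 32-bit type, (x << 10) << 8 folds to x << 18, while
   (x << 20) << 20 folds to 0 because the combined count reaches
   the precision; rotates instead reduce the count modulo 32.  */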
10836 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10837 into x & ((unsigned)-1 >> c) for unsigned types. */
10838 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10839 || (TYPE_UNSIGNED (type)
10840 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10841 && host_integerp (arg1, false)
10842 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10843 && host_integerp (TREE_OPERAND (arg0, 1), false)
10844 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10846 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10847 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10848 tree lshift;
10849 tree arg00;
10851 if (low0 == low1)
10853 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10855 lshift = build_int_cst (type, -1);
10856 lshift = int_const_binop (code, lshift, arg1, 0);
10858 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
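/* Editor's sketch (illustrative, not part of the original source):
   for 32-bit values,
       (x >> 4) << 4             folds to   x & 0xfffffff0, and
       (x << 4) >> 4 (unsigned)  folds to   x & 0x0fffffff.  */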
10862 /* Rewrite an LROTATE_EXPR by a constant into an
10863 RROTATE_EXPR by a new constant. */
10864 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10866 tree tem = build_int_cst (TREE_TYPE (arg1),
10867 GET_MODE_BITSIZE (TYPE_MODE (type)));
10868 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10869 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10872 /* If we have a rotate of a bit operation with the rotate count and
10873 the second operand of the bit operation both constant,
10874 permute the two operations. */
10875 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10876 && (TREE_CODE (arg0) == BIT_AND_EXPR
10877 || TREE_CODE (arg0) == BIT_IOR_EXPR
10878 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10879 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10880 return fold_build2 (TREE_CODE (arg0), type,
10881 fold_build2 (code, type,
10882 TREE_OPERAND (arg0, 0), arg1),
10883 fold_build2 (code, type,
10884 TREE_OPERAND (arg0, 1), arg1));
10886 /* Two consecutive rotates adding up to the width of the mode can
10887 be ignored. */
10888 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10889 && TREE_CODE (arg0) == RROTATE_EXPR
10890 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10891 && TREE_INT_CST_HIGH (arg1) == 0
10892 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10893 && ((TREE_INT_CST_LOW (arg1)
10894 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10895 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10896 return TREE_OPERAND (arg0, 0);
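/* Editor's sketch (illustrative, not part of the original source):
   in a 32-bit mode, a left rotate by 3 becomes a right rotate by
   29, and a right rotate by 12 of a right rotate by 20 cancels
   to the original operand.  */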
10898 return NULL_TREE;
10900 case MIN_EXPR:
10901 if (operand_equal_p (arg0, arg1, 0))
10902 return omit_one_operand (type, arg0, arg1);
10903 if (INTEGRAL_TYPE_P (type)
10904 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10905 return omit_one_operand (type, arg1, arg0);
10906 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10907 if (tem)
10908 return tem;
10909 goto associate;
10911 case MAX_EXPR:
10912 if (operand_equal_p (arg0, arg1, 0))
10913 return omit_one_operand (type, arg0, arg1);
10914 if (INTEGRAL_TYPE_P (type)
10915 && TYPE_MAX_VALUE (type)
10916 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10917 return omit_one_operand (type, arg1, arg0);
10918 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10919 if (tem)
10920 return tem;
10921 goto associate;
10923 case TRUTH_ANDIF_EXPR:
10924 /* Note that the operands of this must be ints
10925 and their values must be 0 or 1.
10926 ("true" is a fixed value perhaps depending on the language.) */
10927 /* If first arg is constant zero, return it. */
10928 if (integer_zerop (arg0))
10929 return fold_convert (type, arg0);
10930 case TRUTH_AND_EXPR:
10931 /* If either arg is constant true, drop it. */
10932 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10933 return non_lvalue (fold_convert (type, arg1));
10934 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10935 /* Preserve sequence points. */
10936 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10937 return non_lvalue (fold_convert (type, arg0));
10938 /* If second arg is constant zero, result is zero, but first arg
10939 must be evaluated. */
10940 if (integer_zerop (arg1))
10941 return omit_one_operand (type, arg1, arg0);
10942 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10943 case will be handled here. */
10944 if (integer_zerop (arg0))
10945 return omit_one_operand (type, arg0, arg1);
10947 /* !X && X is always false. */
10948 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10949 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10950 return omit_one_operand (type, integer_zero_node, arg1);
10951 /* X && !X is always false. */
10952 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10954 return omit_one_operand (type, integer_zero_node, arg0);
10956 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10957 means A >= Y && A != MAX, but in this case we know that
10958 A < X <= MAX. */
10960 if (!TREE_SIDE_EFFECTS (arg0)
10961 && !TREE_SIDE_EFFECTS (arg1))
10963 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10964 if (tem && !operand_equal_p (tem, arg0, 0))
10965 return fold_build2 (code, type, tem, arg1);
10967 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10968 if (tem && !operand_equal_p (tem, arg1, 0))
10969 return fold_build2 (code, type, arg0, tem);
10972 truth_andor:
10973 /* We only do these simplifications if we are optimizing. */
10974 if (!optimize)
10975 return NULL_TREE;
10977 /* Check for things like (A || B) && (A || C). We can convert this
10978 to A || (B && C). Note that either operator can be any of the four
10979 truth and/or operations and the transformation will still be
10980 valid. Also note that we only care about order for the
10981 ANDIF and ORIF operators. If B contains side effects, this
10982 might change the truth-value of A. */
10983 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10984 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10985 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10986 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10987 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10988 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10990 tree a00 = TREE_OPERAND (arg0, 0);
10991 tree a01 = TREE_OPERAND (arg0, 1);
10992 tree a10 = TREE_OPERAND (arg1, 0);
10993 tree a11 = TREE_OPERAND (arg1, 1);
10994 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10995 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10996 && (code == TRUTH_AND_EXPR
10997 || code == TRUTH_OR_EXPR));
10999 if (operand_equal_p (a00, a10, 0))
11000 return fold_build2 (TREE_CODE (arg0), type, a00,
11001 fold_build2 (code, type, a01, a11));
11002 else if (commutative && operand_equal_p (a00, a11, 0))
11003 return fold_build2 (TREE_CODE (arg0), type, a00,
11004 fold_build2 (code, type, a01, a10));
11005 else if (commutative && operand_equal_p (a01, a10, 0))
11006 return fold_build2 (TREE_CODE (arg0), type, a01,
11007 fold_build2 (code, type, a00, a11));
11009 /* This case is tricky because we must either have commutative
11010 operators or else A10 must not have side-effects. */
11012 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11013 && operand_equal_p (a01, a11, 0))
11014 return fold_build2 (TREE_CODE (arg0), type,
11015 fold_build2 (code, type, a00, a10),
11016 a01);
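/* Editor's sketch (illustrative, not part of the original source):
   (a || b) && (a || c) folds to a || (b && c); the checks above
   ensure the non-shared operands are free of side effects where
   the short-circuit evaluation order would otherwise change.  */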
11019 /* See if we can build a range comparison. */
11020 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11021 return tem;
11023 /* Check for the possibility of merging component references. If our
11024 lhs is another similar operation, try to merge its rhs with our
11025 rhs. Then try to merge our lhs and rhs. */
11026 if (TREE_CODE (arg0) == code
11027 && 0 != (tem = fold_truthop (code, type,
11028 TREE_OPERAND (arg0, 1), arg1)))
11029 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11031 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11032 return tem;
11034 return NULL_TREE;
11036 case TRUTH_ORIF_EXPR:
11037 /* Note that the operands of this must be ints
11038 and their values must be 0 or 1.
11039 ("true" is a fixed value perhaps depending on the language.) */
11040 /* If first arg is constant true, return it. */
11041 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11042 return fold_convert (type, arg0);
11043 case TRUTH_OR_EXPR:
11044 /* If either arg is constant zero, drop it. */
11045 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11046 return non_lvalue (fold_convert (type, arg1));
11047 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11048 /* Preserve sequence points. */
11049 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11050 return non_lvalue (fold_convert (type, arg0));
11051 /* If second arg is constant true, result is true, but we must
11052 evaluate first arg. */
11053 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11054 return omit_one_operand (type, arg1, arg0);
11055 /* Likewise for first arg, but note this only occurs here for
11056 TRUTH_OR_EXPR. */
11057 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11058 return omit_one_operand (type, arg0, arg1);
11060 /* !X || X is always true. */
11061 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11062 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11063 return omit_one_operand (type, integer_one_node, arg1);
11064 /* X || !X is always true. */
11065 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11066 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11067 return omit_one_operand (type, integer_one_node, arg0);
11069 goto truth_andor;
11071 case TRUTH_XOR_EXPR:
11072 /* If the second arg is constant zero, drop it. */
11073 if (integer_zerop (arg1))
11074 return non_lvalue (fold_convert (type, arg0));
11075 /* If the second arg is constant true, this is a logical inversion. */
11076 if (integer_onep (arg1))
11078 /* Only call invert_truthvalue if operand is a truth value. */
11079 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11080 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11081 else
11082 tem = invert_truthvalue (arg0);
11083 return non_lvalue (fold_convert (type, tem));
11085 /* Identical arguments cancel to zero. */
11086 if (operand_equal_p (arg0, arg1, 0))
11087 return omit_one_operand (type, integer_zero_node, arg0);
11089 /* !X ^ X is always true. */
11090 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11091 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11092 return omit_one_operand (type, integer_one_node, arg1);
11094 /* X ^ !X is always true. */
11095 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11096 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11097 return omit_one_operand (type, integer_one_node, arg0);
11099 return NULL_TREE;
11101 case EQ_EXPR:
11102 case NE_EXPR:
11103 tem = fold_comparison (code, type, op0, op1);
11104 if (tem != NULL_TREE)
11105 return tem;
11107 /* bool_var != 0 becomes bool_var. */
11108 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11109 && code == NE_EXPR)
11110 return non_lvalue (fold_convert (type, arg0));
11112 /* bool_var == 1 becomes bool_var. */
11113 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11114 && code == EQ_EXPR)
11115 return non_lvalue (fold_convert (type, arg0));
11117 /* bool_var != 1 becomes !bool_var. */
11118 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11119 && code == NE_EXPR)
11120 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11122 /* bool_var == 0 becomes !bool_var. */
11123 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11124 && code == EQ_EXPR)
11125 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11127 /* If this is an equality comparison of the address of a non-weak
11128 object against zero, then we know the result. */
11129 if (TREE_CODE (arg0) == ADDR_EXPR
11130 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11131 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11132 && integer_zerop (arg1))
11133 return constant_boolean_node (code != EQ_EXPR, type);
11135 /* If this is an equality comparison of the address of two non-weak,
11136 unaliased symbols neither of which are extern (since we do not
11137 have access to attributes for externs), then we know the result. */
11138 if (TREE_CODE (arg0) == ADDR_EXPR
11139 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11140 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11141 && ! lookup_attribute ("alias",
11142 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11143 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11144 && TREE_CODE (arg1) == ADDR_EXPR
11145 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11146 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11147 && ! lookup_attribute ("alias",
11148 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11149 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11151 /* We know that we're looking at the address of two
11152 non-weak, unaliased, static _DECL nodes.
11154 It is both wasteful and incorrect to call operand_equal_p
11155 to compare the two ADDR_EXPR nodes. It is wasteful in that
11156 all we need to do is test pointer equality for the arguments
11157 to the two ADDR_EXPR nodes. It is incorrect to use
11158 operand_equal_p as that function is NOT equivalent to a
11159 C equality test. It can in fact return false for two
11160 objects which would test as equal using the C equality
11161 operator. */
11162 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11163 return constant_boolean_node (equal
11164 ? code == EQ_EXPR : code != EQ_EXPR,
11165 type);
11168 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11169 a MINUS_EXPR of a constant, we can convert it into a comparison with
11170 a revised constant as long as no overflow occurs. */
11171 if (TREE_CODE (arg1) == INTEGER_CST
11172 && (TREE_CODE (arg0) == PLUS_EXPR
11173 || TREE_CODE (arg0) == MINUS_EXPR)
11174 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11175 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11176 ? MINUS_EXPR : PLUS_EXPR,
11177 fold_convert (TREE_TYPE (arg0), arg1),
11178 TREE_OPERAND (arg0, 1), 0))
11179 && !TREE_OVERFLOW (tem))
11180 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11182 /* Similarly for a NEGATE_EXPR. */
11183 if (TREE_CODE (arg0) == NEGATE_EXPR
11184 && TREE_CODE (arg1) == INTEGER_CST
11185 && 0 != (tem = negate_expr (arg1))
11186 && TREE_CODE (tem) == INTEGER_CST
11187 && !TREE_OVERFLOW (tem))
11188 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11190 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11191 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11192 && TREE_CODE (arg1) == INTEGER_CST
11193 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11194 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11195 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11196 fold_convert (TREE_TYPE (arg0), arg1),
11197 TREE_OPERAND (arg0, 1)));
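/* Editor's sketch (illustrative, not part of the original source):
   (x ^ 5) == 3 folds to x == (5 ^ 3), i.e. x == 6, since XOR by
   a constant is its own inverse.  */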
11199 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11200 for !=. Don't do this for ordered comparisons due to overflow. */
11201 if (TREE_CODE (arg0) == MINUS_EXPR
11202 && integer_zerop (arg1))
11203 return fold_build2 (code, type,
11204 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11206 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11207 if (TREE_CODE (arg0) == ABS_EXPR
11208 && (integer_zerop (arg1) || real_zerop (arg1)))
11209 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11211 /* If this is an EQ or NE comparison with zero and ARG0 is
11212 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11213 two operations, but the latter can be done in one less insn
11214 on machines that have only two-operand insns or on which a
11215 constant cannot be the first operand. */
11216 if (TREE_CODE (arg0) == BIT_AND_EXPR
11217 && integer_zerop (arg1))
11219 tree arg00 = TREE_OPERAND (arg0, 0);
11220 tree arg01 = TREE_OPERAND (arg0, 1);
11221 if (TREE_CODE (arg00) == LSHIFT_EXPR
11222 && integer_onep (TREE_OPERAND (arg00, 0)))
11223 return
11224 fold_build2 (code, type,
11225 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11226 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11227 arg01, TREE_OPERAND (arg00, 1)),
11228 fold_convert (TREE_TYPE (arg0),
11229 integer_one_node)),
11230 arg1);
11231 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11232 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11233 return
11234 fold_build2 (code, type,
11235 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11236 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11237 arg00, TREE_OPERAND (arg01, 1)),
11238 fold_convert (TREE_TYPE (arg0),
11239 integer_one_node)),
11240 arg1);
11243 /* If this is an NE or EQ comparison of zero against the result of a
11244 signed MOD operation whose second operand is a power of 2, make
11245 the MOD operation unsigned since it is simpler and equivalent. */
11246 if (integer_zerop (arg1)
11247 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11248 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11249 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11250 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11251 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11252 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11254 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
11255 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11256 fold_convert (newtype,
11257 TREE_OPERAND (arg0, 0)),
11258 fold_convert (newtype,
11259 TREE_OPERAND (arg0, 1)));
11261 return fold_build2 (code, type, newmod,
11262 fold_convert (newtype, arg1));
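/* Editor's sketch (illustrative, not part of the original source):
   for signed int x, (x % 4) == 0 folds to ((unsigned) x % 4) == 0;
   both forms agree on divisibility, and the unsigned modulus can
   then be simplified to a mask by the rules above.  */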
11265 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11266 C1 is a valid shift constant, and C2 is a power of two, i.e.
11267 a single bit. */
11268 if (TREE_CODE (arg0) == BIT_AND_EXPR
11269 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11270 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11271 == INTEGER_CST
11272 && integer_pow2p (TREE_OPERAND (arg0, 1))
11273 && integer_zerop (arg1))
11275 tree itype = TREE_TYPE (arg0);
11276 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11277 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11279 /* Check for a valid shift count. */
11280 if (TREE_INT_CST_HIGH (arg001) == 0
11281 && TREE_INT_CST_LOW (arg001) < prec)
11283 tree arg01 = TREE_OPERAND (arg0, 1);
11284 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11285 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11286 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11287 can be rewritten as (X & (C2 << C1)) != 0. */
11288 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11290 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11291 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11292 return fold_build2 (code, type, tem, arg1);
11294 /* Otherwise, for signed (arithmetic) shifts,
11295 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11296 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11297 else if (!TYPE_UNSIGNED (itype))
11298 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11299 arg000, build_int_cst (itype, 0));
11300 /* Otherwise, for unsigned (logical) shifts,
11301 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11302 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11303 else
11304 return omit_one_operand (type,
11305 code == EQ_EXPR ? integer_one_node
11306 : integer_zero_node,
11307 arg000);
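/* Editor's sketch (illustrative, not part of the original source):
   for 32-bit x, ((x >> 3) & 4) != 0 folds to (x & (4 << 3)) != 0;
   when the tested bit would be shifted past the precision, an
   arithmetic shift reduces the test to a sign check like x < 0.  */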
11311 /* If this is an NE comparison of zero with an AND of one, remove the
11312 comparison since the AND will give the correct value. */
11313 if (code == NE_EXPR
11314 && integer_zerop (arg1)
11315 && TREE_CODE (arg0) == BIT_AND_EXPR
11316 && integer_onep (TREE_OPERAND (arg0, 1)))
11317 return fold_convert (type, arg0);
11319 /* If we have (A & C) == C where C is a power of 2, convert this into
11320 (A & C) != 0. Similarly for NE_EXPR. */
11321 if (TREE_CODE (arg0) == BIT_AND_EXPR
11322 && integer_pow2p (TREE_OPERAND (arg0, 1))
11323 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11324 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11325 arg0, fold_convert (TREE_TYPE (arg0),
11326 integer_zero_node));
11328 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11329 bit, then fold the expression into A < 0 or A >= 0. */
11330 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11331 if (tem)
11332 return tem;
11334 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11335 Similarly for NE_EXPR. */
11336 if (TREE_CODE (arg0) == BIT_AND_EXPR
11337 && TREE_CODE (arg1) == INTEGER_CST
11338 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11340 tree notc = fold_build1 (BIT_NOT_EXPR,
11341 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11342 TREE_OPERAND (arg0, 1));
11343 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11344 arg1, notc);
11345 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11346 if (integer_nonzerop (dandnotc))
11347 return omit_one_operand (type, rslt, arg0);
11350 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11351 Similarly for NE_EXPR. */
11352 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11353 && TREE_CODE (arg1) == INTEGER_CST
11354 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11356 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11357 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11358 TREE_OPERAND (arg0, 1), notd);
11359 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11360 if (integer_nonzerop (candnotd))
11361 return omit_one_operand (type, rslt, arg0);
11364 /* If this is a comparison of a field, we may be able to simplify it. */
11365 if ((TREE_CODE (arg0) == COMPONENT_REF
11366 || TREE_CODE (arg0) == BIT_FIELD_REF)
11367 /* Handle the constant case even without -O
11368 to make sure the warnings are given. */
11369 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11371 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11372 if (t1)
11373 return t1;
11376 /* Optimize comparisons of strlen vs zero to a compare of the
11377 first character of the string vs zero. To wit,
11378 strlen(ptr) == 0 => *ptr == 0
11379 strlen(ptr) != 0 => *ptr != 0
11380 Other cases should reduce to one of these two (or a constant)
11381 due to the return value of strlen being unsigned. */
11382 if (TREE_CODE (arg0) == CALL_EXPR
11383 && integer_zerop (arg1))
11385 tree fndecl = get_callee_fndecl (arg0);
11387 if (fndecl
11388 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11389 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11390 && call_expr_nargs (arg0) == 1
11391 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11393 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11394 return fold_build2 (code, type, iref,
11395 build_int_cst (TREE_TYPE (iref), 0));
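/* Editor's sketch (illustrative, not part of the original source):
   strlen (p) == 0 folds to *p == 0, replacing the call with a
   single character load; since strlen is unsigned, comparisons
   such as strlen (p) > 0 reduce to these forms first.  */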
11399 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11400 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11401 if (TREE_CODE (arg0) == RSHIFT_EXPR
11402 && integer_zerop (arg1)
11403 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11405 tree arg00 = TREE_OPERAND (arg0, 0);
11406 tree arg01 = TREE_OPERAND (arg0, 1);
11407 tree itype = TREE_TYPE (arg00);
11408 if (TREE_INT_CST_HIGH (arg01) == 0
11409 && TREE_INT_CST_LOW (arg01)
11410 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11412 if (TYPE_UNSIGNED (itype))
11414 itype = lang_hooks.types.signed_type (itype);
11415 arg00 = fold_convert (itype, arg00);
11417 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11418 type, arg00, build_int_cst (itype, 0));
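/* Editor's sketch (illustrative, not part of the original source):
   for 32-bit int x, (x >> 31) != 0 folds to x < 0 and
   (x >> 31) == 0 folds to x >= 0, converting through the signed
   type first when x is unsigned.  */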
11422 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11423 if (integer_zerop (arg1)
11424 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11425 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11426 TREE_OPERAND (arg0, 1));
11428 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11429 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11430 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11431 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11432 build_int_cst (TREE_TYPE (arg1), 0));
11433 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11434 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11435 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11436 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11437 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11438 build_int_cst (TREE_TYPE (arg1), 0));
11440 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11441 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11442 && TREE_CODE (arg1) == INTEGER_CST
11443 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11444 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11445 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11446 TREE_OPERAND (arg0, 1), arg1));
11448 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11449 (X & C) == 0 when C is a single bit. */
11450 if (TREE_CODE (arg0) == BIT_AND_EXPR
11451 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11452 && integer_zerop (arg1)
11453 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11455 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11456 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11457 TREE_OPERAND (arg0, 1));
11458 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11459 type, tem, arg1);
11462 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11463 constant C is a power of two, i.e. a single bit. */
11464 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11465 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11466 && integer_zerop (arg1)
11467 && integer_pow2p (TREE_OPERAND (arg0, 1))
11468 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11469 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11471 tree arg00 = TREE_OPERAND (arg0, 0);
11472 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11473 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11476 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11477 when C is a power of two, i.e. a single bit. */
11478 if (TREE_CODE (arg0) == BIT_AND_EXPR
11479 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11480 && integer_zerop (arg1)
11481 && integer_pow2p (TREE_OPERAND (arg0, 1))
11482 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11483 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11485 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11486 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11487 arg000, TREE_OPERAND (arg0, 1));
11488 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11489 tem, build_int_cst (TREE_TYPE (tem), 0));
11492 if (integer_zerop (arg1)
11493 && tree_expr_nonzero_p (arg0))
11495 tree res = constant_boolean_node (code == NE_EXPR, type);
11496 return omit_one_operand (type, res, arg0);
11499 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11500 if (TREE_CODE (arg0) == NEGATE_EXPR
11501 && TREE_CODE (arg1) == NEGATE_EXPR)
11502 return fold_build2 (code, type,
11503 TREE_OPERAND (arg0, 0),
11504 TREE_OPERAND (arg1, 0));
11506 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11507 if (TREE_CODE (arg0) == BIT_AND_EXPR
11508 && TREE_CODE (arg1) == BIT_AND_EXPR)
11510 tree arg00 = TREE_OPERAND (arg0, 0);
11511 tree arg01 = TREE_OPERAND (arg0, 1);
11512 tree arg10 = TREE_OPERAND (arg1, 0);
11513 tree arg11 = TREE_OPERAND (arg1, 1);
11514 tree itype = TREE_TYPE (arg0);
11516 if (operand_equal_p (arg01, arg11, 0))
11517 return fold_build2 (code, type,
11518 fold_build2 (BIT_AND_EXPR, itype,
11519 fold_build2 (BIT_XOR_EXPR, itype,
11520 arg00, arg10),
11521 arg01),
11522 build_int_cst (itype, 0));
11524 if (operand_equal_p (arg01, arg10, 0))
11525 return fold_build2 (code, type,
11526 fold_build2 (BIT_AND_EXPR, itype,
11527 fold_build2 (BIT_XOR_EXPR, itype,
11528 arg00, arg11),
11529 arg01),
11530 build_int_cst (itype, 0));
11532 if (operand_equal_p (arg00, arg11, 0))
11533 return fold_build2 (code, type,
11534 fold_build2 (BIT_AND_EXPR, itype,
11535 fold_build2 (BIT_XOR_EXPR, itype,
11536 arg01, arg10),
11537 arg00),
11538 build_int_cst (itype, 0));
11540 if (operand_equal_p (arg00, arg10, 0))
11541 return fold_build2 (code, type,
11542 fold_build2 (BIT_AND_EXPR, itype,
11543 fold_build2 (BIT_XOR_EXPR, itype,
11544 arg01, arg11),
11545 arg00),
11546 build_int_cst (itype, 0));
11549 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11550 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11552 tree arg00 = TREE_OPERAND (arg0, 0);
11553 tree arg01 = TREE_OPERAND (arg0, 1);
11554 tree arg10 = TREE_OPERAND (arg1, 0);
11555 tree arg11 = TREE_OPERAND (arg1, 1);
11556 tree itype = TREE_TYPE (arg0);
11558 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11559 operand_equal_p guarantees no side-effects so we don't need
11560 to use omit_one_operand on Z. */
11561 if (operand_equal_p (arg01, arg11, 0))
11562 return fold_build2 (code, type, arg00, arg10);
11563 if (operand_equal_p (arg01, arg10, 0))
11564 return fold_build2 (code, type, arg00, arg11);
11565 if (operand_equal_p (arg00, arg11, 0))
11566 return fold_build2 (code, type, arg01, arg10);
11567 if (operand_equal_p (arg00, arg10, 0))
11568 return fold_build2 (code, type, arg01, arg11);
11570 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11571 if (TREE_CODE (arg01) == INTEGER_CST
11572 && TREE_CODE (arg11) == INTEGER_CST)
11573 return fold_build2 (code, type,
11574 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11575 fold_build2 (BIT_XOR_EXPR, itype,
11576 arg01, arg11)),
11577 arg10);
11579 return NULL_TREE;
11581 case LT_EXPR:
11582 case GT_EXPR:
11583 case LE_EXPR:
11584 case GE_EXPR:
11585 tem = fold_comparison (code, type, op0, op1);
11586 if (tem != NULL_TREE)
11587 return tem;
11589 /* Transform comparisons of the form X +- C CMP X. */
11590 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11591 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11592 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11593 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11594 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11595 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11597 tree arg01 = TREE_OPERAND (arg0, 1);
11598 enum tree_code code0 = TREE_CODE (arg0);
11599 int is_positive;
11601 if (TREE_CODE (arg01) == REAL_CST)
11602 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11603 else
11604 is_positive = tree_int_cst_sgn (arg01);
11606 /* (X - c) > X becomes false. */
11607 if (code == GT_EXPR
11608 && ((code0 == MINUS_EXPR && is_positive >= 0)
11609 || (code0 == PLUS_EXPR && is_positive <= 0)))
11611 if (TREE_CODE (arg01) == INTEGER_CST
11612 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11613 fold_overflow_warning (("assuming signed overflow does not "
11614 "occur when assuming that (X - c) > X "
11615 "is always false"),
11616 WARN_STRICT_OVERFLOW_ALL);
11617 return constant_boolean_node (0, type);
11620 /* Likewise (X + c) < X becomes false. */
11621 if (code == LT_EXPR
11622 && ((code0 == PLUS_EXPR && is_positive >= 0)
11623 || (code0 == MINUS_EXPR && is_positive <= 0)))
11625 if (TREE_CODE (arg01) == INTEGER_CST
11626 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11627 fold_overflow_warning (("assuming signed overflow does not "
11628 "occur when assuming that "
11629 "(X + c) < X is always false"),
11630 WARN_STRICT_OVERFLOW_ALL);
11631 return constant_boolean_node (0, type);
11634 /* Convert (X - c) <= X to true. */
11635 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11636 && code == LE_EXPR
11637 && ((code0 == MINUS_EXPR && is_positive >= 0)
11638 || (code0 == PLUS_EXPR && is_positive <= 0)))
11640 if (TREE_CODE (arg01) == INTEGER_CST
11641 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11642 fold_overflow_warning (("assuming signed overflow does not "
11643 "occur when assuming that "
11644 "(X - c) <= X is always true"),
11645 WARN_STRICT_OVERFLOW_ALL);
11646 return constant_boolean_node (1, type);
11649 /* Convert (X + c) >= X to true. */
11650 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11651 && code == GE_EXPR
11652 && ((code0 == PLUS_EXPR && is_positive >= 0)
11653 || (code0 == MINUS_EXPR && is_positive <= 0)))
11655 if (TREE_CODE (arg01) == INTEGER_CST
11656 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11657 fold_overflow_warning (("assuming signed overflow does not "
11658 "occur when assuming that "
11659 "(X + c) >= X is always true"),
11660 WARN_STRICT_OVERFLOW_ALL);
11661 return constant_boolean_node (1, type);
11664 if (TREE_CODE (arg01) == INTEGER_CST)
11666 /* Convert X + c > X and X - c < X to true for integers. */
11667 if (code == GT_EXPR
11668 && ((code0 == PLUS_EXPR && is_positive > 0)
11669 || (code0 == MINUS_EXPR && is_positive < 0)))
11671 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11672 fold_overflow_warning (("assuming signed overflow does "
11673 "not occur when assuming that "
11674 "(X + c) > X is always true"),
11675 WARN_STRICT_OVERFLOW_ALL);
11676 return constant_boolean_node (1, type);
11679 if (code == LT_EXPR
11680 && ((code0 == MINUS_EXPR && is_positive > 0)
11681 || (code0 == PLUS_EXPR && is_positive < 0)))
11683 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11684 fold_overflow_warning (("assuming signed overflow does "
11685 "not occur when assuming that "
11686 "(X - c) < X is always true"),
11687 WARN_STRICT_OVERFLOW_ALL);
11688 return constant_boolean_node (1, type);
11691 /* Convert X + c <= X and X - c >= X to false for integers. */
11692 if (code == LE_EXPR
11693 && ((code0 == PLUS_EXPR && is_positive > 0)
11694 || (code0 == MINUS_EXPR && is_positive < 0)))
11696 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11697 fold_overflow_warning (("assuming signed overflow does "
11698 "not occur when assuming that "
11699 "(X + c) <= X is always false"),
11700 WARN_STRICT_OVERFLOW_ALL);
11701 return constant_boolean_node (0, type);
11704 if (code == GE_EXPR
11705 && ((code0 == MINUS_EXPR && is_positive > 0)
11706 || (code0 == PLUS_EXPR && is_positive < 0)))
11708 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11709 fold_overflow_warning (("assuming signed overflow does "
11710 "not occur when assuming that "
11711 "(X - c) >= X is always true"),
11712 WARN_STRICT_OVERFLOW_ALL);
11713 return constant_boolean_node (0, type);
11718 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11719 This transformation affects the cases which are handled in later
11720 optimizations involving comparisons with non-negative constants. */
11721 if (TREE_CODE (arg1) == INTEGER_CST
11722 && TREE_CODE (arg0) != INTEGER_CST
11723 && tree_int_cst_sgn (arg1) > 0)
11725 if (code == GE_EXPR)
11727 arg1 = const_binop (MINUS_EXPR, arg1,
11728 build_int_cst (TREE_TYPE (arg1), 1), 0);
11729 return fold_build2 (GT_EXPR, type, arg0,
11730 fold_convert (TREE_TYPE (arg0), arg1));
11732 if (code == LT_EXPR)
11734 arg1 = const_binop (MINUS_EXPR, arg1,
11735 build_int_cst (TREE_TYPE (arg1), 1), 0);
11736 return fold_build2 (LE_EXPR, type, arg0,
11737 fold_convert (TREE_TYPE (arg0), arg1));
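/* Editor's sketch (illustrative, not part of the original source):
   x >= 5 folds to x > 4 and x < 5 folds to x <= 4, canonicalizing
   toward GT/LE so the extreme-value checks below see fewer
   distinct forms.  */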
11741 /* Comparisons with the highest or lowest possible integer of
11742 the specified precision will have known values. */
11744 tree arg1_type = TREE_TYPE (arg1);
11745 unsigned int width = TYPE_PRECISION (arg1_type);
11747 if (TREE_CODE (arg1) == INTEGER_CST
11748 && !TREE_OVERFLOW (arg1)
11749 && width <= 2 * HOST_BITS_PER_WIDE_INT
11750 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11752 HOST_WIDE_INT signed_max_hi;
11753 unsigned HOST_WIDE_INT signed_max_lo;
11754 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11756 if (width <= HOST_BITS_PER_WIDE_INT)
11758 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11759 - 1;
11760 signed_max_hi = 0;
11761 max_hi = 0;
11763 if (TYPE_UNSIGNED (arg1_type))
11765 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11766 min_lo = 0;
11767 min_hi = 0;
11769 else
11771 max_lo = signed_max_lo;
11772 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11773 min_hi = -1;
11776 else
11778 width -= HOST_BITS_PER_WIDE_INT;
11779 signed_max_lo = -1;
11780 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11781 - 1;
11782 max_lo = -1;
11783 min_lo = 0;
11785 if (TYPE_UNSIGNED (arg1_type))
11787 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11788 min_hi = 0;
11790 else
11792 max_hi = signed_max_hi;
11793 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11797 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11798 && TREE_INT_CST_LOW (arg1) == max_lo)
11799 switch (code)
11801 case GT_EXPR:
11802 return omit_one_operand (type, integer_zero_node, arg0);
11804 case GE_EXPR:
11805 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11807 case LE_EXPR:
11808 return omit_one_operand (type, integer_one_node, arg0);
11810 case LT_EXPR:
11811 return fold_build2 (NE_EXPR, type, arg0, arg1);
11813 /* The GE_EXPR and LT_EXPR cases above are not normally
11814 reached because of previous transformations. */
11816 default:
11817 break;
11819 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11820 == max_hi
11821 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11822 switch (code)
11824 case GT_EXPR:
11825 arg1 = const_binop (PLUS_EXPR, arg1,
11826 build_int_cst (TREE_TYPE (arg1), 1), 0);
11827 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11828 case LE_EXPR:
11829 arg1 = const_binop (PLUS_EXPR, arg1,
11830 build_int_cst (TREE_TYPE (arg1), 1), 0);
11831 return fold_build2 (NE_EXPR, type, arg0, arg1);
11832 default:
11833 break;
11835 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11836 == min_hi
11837 && TREE_INT_CST_LOW (arg1) == min_lo)
11838 switch (code)
11840 case LT_EXPR:
11841 return omit_one_operand (type, integer_zero_node, arg0);
11843 case LE_EXPR:
11844 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11846 case GE_EXPR:
11847 return omit_one_operand (type, integer_one_node, arg0);
11849 case GT_EXPR:
11850 return fold_build2 (NE_EXPR, type, op0, op1);
11852 default:
11853 break;
11855 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11856 == min_hi
11857 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11858 switch (code)
11860 case GE_EXPR:
11861 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11862 return fold_build2 (NE_EXPR, type, arg0, arg1);
11863 case LT_EXPR:
11864 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11865 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11866 default:
11867 break;
11870 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11871 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11872 && TYPE_UNSIGNED (arg1_type)
11873 /* We will flip the signedness of the comparison operator
11874 associated with the mode of arg1, so the sign bit is
11875 specified by this mode. Check that arg1 is the signed
11876 max associated with this sign bit. */
11877 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11878 /* signed_type does not work on pointer types. */
11879 && INTEGRAL_TYPE_P (arg1_type))
11881 /* The following case also applies to X < signed_max+1
11882 and X >= signed_max+1 because of previous transformations. */
11883 if (code == LE_EXPR || code == GT_EXPR)
11885 tree st0, st1;
11886 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11887 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11888 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11889 type, fold_convert (st0, arg0),
11890 build_int_cst (st1, 0));
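/* Editor's sketch (illustrative, not part of the original source):
   for 32-bit unsigned x, x <= 0x7fffffff folds to (int) x >= 0
   and x > 0x7fffffff folds to (int) x < 0, turning a comparison
   against the signed maximum into a sign-bit test.  */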
11896 /* If we are comparing an ABS_EXPR with a constant, we can
11897 convert all the cases into explicit comparisons, but they may
11898 well not be faster than doing the ABS and one comparison.
11899 But ABS (X) <= C is a range comparison, which becomes a subtraction
11900 and a comparison, and is probably faster. */
11901 if (code == LE_EXPR
11902 && TREE_CODE (arg1) == INTEGER_CST
11903 && TREE_CODE (arg0) == ABS_EXPR
11904 && ! TREE_SIDE_EFFECTS (arg0)
11905 && (0 != (tem = negate_expr (arg1)))
11906 && TREE_CODE (tem) == INTEGER_CST
11907 && !TREE_OVERFLOW (tem))
11908 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11909 build2 (GE_EXPR, type,
11910 TREE_OPERAND (arg0, 0), tem),
11911 build2 (LE_EXPR, type,
11912 TREE_OPERAND (arg0, 0), arg1));
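/* Editor's sketch (illustrative, not part of the original source):
   abs (x) <= 7 folds to x >= -7 && x <= 7, a range test that the
   range machinery can later turn into a single unsigned compare
   such as (unsigned) (x + 7) <= 14.  */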
11914 /* Convert ABS_EXPR<x> >= 0 to true. */
11915 strict_overflow_p = false;
11916 if (code == GE_EXPR
11917 && (integer_zerop (arg1)
11918 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11919 && real_zerop (arg1)))
11920 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11922 if (strict_overflow_p)
11923 fold_overflow_warning (("assuming signed overflow does not occur "
11924 "when simplifying comparison of "
11925 "absolute value and zero"),
11926 WARN_STRICT_OVERFLOW_CONDITIONAL);
11927 return omit_one_operand (type, integer_one_node, arg0);
11930 /* Convert ABS_EXPR<x> < 0 to false. */
11931 strict_overflow_p = false;
11932 if (code == LT_EXPR
11933 && (integer_zerop (arg1) || real_zerop (arg1))
11934 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11936 if (strict_overflow_p)
11937 fold_overflow_warning (("assuming signed overflow does not occur "
11938 "when simplifying comparison of "
11939 "absolute value and zero"),
11940 WARN_STRICT_OVERFLOW_CONDITIONAL);
11941 return omit_one_operand (type, integer_zero_node, arg0);
11944 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11945 and similarly for >= into !=. */
11946 if ((code == LT_EXPR || code == GE_EXPR)
11947 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11948 && TREE_CODE (arg1) == LSHIFT_EXPR
11949 && integer_onep (TREE_OPERAND (arg1, 0)))
11950 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11951 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11952 TREE_OPERAND (arg1, 1)),
11953 build_int_cst (TREE_TYPE (arg0), 0));
11955 if ((code == LT_EXPR || code == GE_EXPR)
11956 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11957 && (TREE_CODE (arg1) == NOP_EXPR
11958 || TREE_CODE (arg1) == CONVERT_EXPR)
11959 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11960 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11961 return
11962 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11963 fold_convert (TREE_TYPE (arg0),
11964 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11965 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11966 1))),
11967 build_int_cst (TREE_TYPE (arg0), 0));
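/* Editor's sketch (illustrative, not part of the original source):
   for unsigned x, x < (1 << y) folds to (x >> y) == 0 and
   x >= (1 << y) folds to (x >> y) != 0; the NOP_EXPR variant just
   above handles a cast wrapped around the shift.  */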
11969 return NULL_TREE;
11971 case UNORDERED_EXPR:
11972 case ORDERED_EXPR:
11973 case UNLT_EXPR:
11974 case UNLE_EXPR:
11975 case UNGT_EXPR:
11976 case UNGE_EXPR:
11977 case UNEQ_EXPR:
11978 case LTGT_EXPR:
11979 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11981 t1 = fold_relational_const (code, type, arg0, arg1);
11982 if (t1 != NULL_TREE)
11983 return t1;
11986 /* If the first operand is NaN, the result is constant. */
11987 if (TREE_CODE (arg0) == REAL_CST
11988 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11989 && (code != LTGT_EXPR || ! flag_trapping_math))
11991 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11992 ? integer_zero_node
11993 : integer_one_node;
11994 return omit_one_operand (type, t1, arg1);
11997 /* If the second operand is NaN, the result is constant. */
11998 if (TREE_CODE (arg1) == REAL_CST
11999 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12000 && (code != LTGT_EXPR || ! flag_trapping_math))
12002 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12003 ? integer_zero_node
12004 : integer_one_node;
12005 return omit_one_operand (type, t1, arg0);
12008 /* Simplify unordered comparison of something with itself. */
12009 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12010 && operand_equal_p (arg0, arg1, 0))
12011 return constant_boolean_node (1, type);
12013 if (code == LTGT_EXPR
12014 && !flag_trapping_math
12015 && operand_equal_p (arg0, arg1, 0))
12016 return constant_boolean_node (0, type);
12018 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12020 tree targ0 = strip_float_extensions (arg0);
12021 tree targ1 = strip_float_extensions (arg1);
12022 tree newtype = TREE_TYPE (targ0);
12024 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12025 newtype = TREE_TYPE (targ1);
12027 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12028 return fold_build2 (code, type, fold_convert (newtype, targ0),
12029 fold_convert (newtype, targ1));
12032 return NULL_TREE;
12034 case COMPOUND_EXPR:
12035 /* When pedantic, a compound expression can be neither an lvalue
12036 nor an integer constant expression. */
12037 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12038 return NULL_TREE;
12039 /* Don't let (0, 0) be a null pointer constant. */
12040 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12041 : fold_convert (type, arg1);
12042 return pedantic_non_lvalue (tem);
12044 case COMPLEX_EXPR:
12045 if ((TREE_CODE (arg0) == REAL_CST
12046 && TREE_CODE (arg1) == REAL_CST)
12047 || (TREE_CODE (arg0) == INTEGER_CST
12048 && TREE_CODE (arg1) == INTEGER_CST))
12049 return build_complex (type, arg0, arg1);
12050 return NULL_TREE;
12052 case ASSERT_EXPR:
12053 /* An ASSERT_EXPR should never be passed to fold_binary. */
12054 gcc_unreachable ();
12056 default:
12057 return NULL_TREE;
12058 } /* switch (code) */
12061 /* Callback for walk_tree, looking for LABEL_EXPR.
12062 Returns *TP if it is a LABEL_EXPR. Otherwise it returns NULL_TREE.
12063 Do not check the sub-trees of GOTO_EXPR. */
12065 static tree
12066 contains_label_1 (tree *tp,
12067 int *walk_subtrees,
12068 void *data ATTRIBUTE_UNUSED)
12070 switch (TREE_CODE (*tp))
12072 case LABEL_EXPR:
12073 return *tp;
12074 case GOTO_EXPR:
12075 *walk_subtrees = 0;
12076 /* no break */
12077 default:
12078 return NULL_TREE;
12082 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12083 accessible from outside the sub-tree. Returns false if no
12084 such label is found. */
12086 static bool
12087 contains_label_p (tree st)
12089 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12092 /* Fold a ternary expression of code CODE and type TYPE with operands
12093 OP0, OP1, and OP2. Return the folded expression if folding is
12094 successful. Otherwise, return NULL_TREE. */
12096 tree
12097 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12099 tree tem;
12100 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12101 enum tree_code_class kind = TREE_CODE_CLASS (code);
12103 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12104 && TREE_CODE_LENGTH (code) == 3);
12106 /* Strip any conversions that don't change the mode. This is safe
12107 for every expression, except for a comparison expression because
12108 its signedness is derived from its operands. So, in the latter
12109 case, only strip conversions that don't change the signedness.
12111 Note that this is done as an internal manipulation within the
12112 constant folder, in order to find the simplest representation of
12113 the arguments so that their form can be studied. In any case,
12114 the appropriate type conversions should be put back in the tree
12115 that will get out of the constant folder. */
12116 if (op0)
12118 arg0 = op0;
12119 STRIP_NOPS (arg0);
12122 if (op1)
12124 arg1 = op1;
12125 STRIP_NOPS (arg1);
12128 switch (code)
12130 case COMPONENT_REF:
12131 if (TREE_CODE (arg0) == CONSTRUCTOR
12132 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12134 unsigned HOST_WIDE_INT idx;
12135 tree field, value;
12136 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12137 if (field == arg1)
12138 return value;
12140 return NULL_TREE;
12142 case COND_EXPR:
12143 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12144 so all simple results must be passed through pedantic_non_lvalue. */
12145 if (TREE_CODE (arg0) == INTEGER_CST)
12147 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12148 tem = integer_zerop (arg0) ? op2 : op1;
12149 /* Only optimize constant conditions when the selected branch
12150 has the same type as the COND_EXPR. This avoids optimizing
12151 away "c ? x : throw", where the throw has a void type.
12152 Avoid throwing away an operand which contains a label. */
12153 if ((!TREE_SIDE_EFFECTS (unused_op)
12154 || !contains_label_p (unused_op))
12155 && (! VOID_TYPE_P (TREE_TYPE (tem))
12156 || VOID_TYPE_P (type)))
12157 return pedantic_non_lvalue (tem);
12158 return NULL_TREE;
12160 if (operand_equal_p (arg1, op2, 0))
12161 return pedantic_omit_one_operand (type, arg1, arg0);
12163 /* If we have A op B ? A : C, we may be able to convert this to a
12164 simpler expression, depending on the operation and the values
12165 of B and C. Signed zeros prevent all of these transformations,
12166 for reasons given above each one.
12168 Also try swapping the arguments and inverting the conditional. */
12169 if (COMPARISON_CLASS_P (arg0)
12170 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12171 arg1, TREE_OPERAND (arg0, 1))
12172 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12174 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12175 if (tem)
12176 return tem;
12179 if (COMPARISON_CLASS_P (arg0)
12180 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12181 op2,
12182 TREE_OPERAND (arg0, 1))
12183 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12185 tem = fold_truth_not_expr (arg0);
12186 if (tem && COMPARISON_CLASS_P (tem))
12188 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12189 if (tem)
12190 return tem;
12194 /* If the second operand is simpler than the third, swap them
12195 since that produces better jump optimization results. */
12196 if (truth_value_p (TREE_CODE (arg0))
12197 && tree_swap_operands_p (op1, op2, false))
12199 /* See if this can be inverted. If it can't, possibly because
12200 it was a floating-point inequality comparison, don't do
12201 anything. */
12202 tem = fold_truth_not_expr (arg0);
12203 if (tem)
12204 return fold_build3 (code, type, tem, op2, op1);
12207 /* Convert A ? 1 : 0 to simply A. */
12208 if (integer_onep (op1)
12209 && integer_zerop (op2)
12210 /* If we try to convert OP0 to our type, the
12211 call to fold will try to move the conversion inside
12212 a COND, which will recurse. In that case, the COND_EXPR
12213 is probably the best choice, so leave it alone. */
12214 && type == TREE_TYPE (arg0))
12215 return pedantic_non_lvalue (arg0);
12217 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12218 over COND_EXPR in cases such as floating point comparisons. */
12219 if (integer_zerop (op1)
12220 && integer_onep (op2)
12221 && truth_value_p (TREE_CODE (arg0)))
12222 return pedantic_non_lvalue (fold_convert (type,
12223 invert_truthvalue (arg0)));
12225 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12226 if (TREE_CODE (arg0) == LT_EXPR
12227 && integer_zerop (TREE_OPERAND (arg0, 1))
12228 && integer_zerop (op2)
12229 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12231 /* sign_bit_p only checks ARG1 bits within A's precision.
12232 If <sign bit of A> has wider type than A, bits outside
12233 of A's precision in <sign bit of A> need to be checked.
12234 If they are all 0, this optimization needs to be done
12235 in unsigned A's type; if they are all 1, in signed A's type;
12236 otherwise it can't be done. */
12237 if (TYPE_PRECISION (TREE_TYPE (tem))
12238 < TYPE_PRECISION (TREE_TYPE (arg1))
12239 && TYPE_PRECISION (TREE_TYPE (tem))
12240 < TYPE_PRECISION (type))
12242 unsigned HOST_WIDE_INT mask_lo;
12243 HOST_WIDE_INT mask_hi;
12244 int inner_width, outer_width;
12245 tree tem_type;
12247 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12248 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12249 if (outer_width > TYPE_PRECISION (type))
12250 outer_width = TYPE_PRECISION (type);
12252 if (outer_width > HOST_BITS_PER_WIDE_INT)
12254 mask_hi = ((unsigned HOST_WIDE_INT) -1
12255 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12256 mask_lo = -1;
12258 else
12260 mask_hi = 0;
12261 mask_lo = ((unsigned HOST_WIDE_INT) -1
12262 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12264 if (inner_width > HOST_BITS_PER_WIDE_INT)
12266 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12267 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12268 mask_lo = 0;
12270 else
12271 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12272 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12274 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12275 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12277 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
12278 tem = fold_convert (tem_type, tem);
12280 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12281 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12283 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
12284 tem = fold_convert (tem_type, tem);
12286 else
12287 tem = NULL;
12290 if (tem)
12291 return fold_convert (type,
12292 fold_build2 (BIT_AND_EXPR,
12293 TREE_TYPE (tem), tem,
12294 fold_convert (TREE_TYPE (tem),
12295 arg1)));
12298 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12299 already handled above. */
12300 if (TREE_CODE (arg0) == BIT_AND_EXPR
12301 && integer_onep (TREE_OPERAND (arg0, 1))
12302 && integer_zerop (op2)
12303 && integer_pow2p (arg1))
12305 tree tem = TREE_OPERAND (arg0, 0);
12306 STRIP_NOPS (tem);
12307 if (TREE_CODE (tem) == RSHIFT_EXPR
12308 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12309 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12310 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12311 return fold_build2 (BIT_AND_EXPR, type,
12312 TREE_OPERAND (tem, 0), arg1);
12315 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12316 is probably obsolete because the first operand should be a
12317 truth value (that's why we have the two cases above), but let's
12318 leave it in until we can confirm this for all front-ends. */
12319 if (integer_zerop (op2)
12320 && TREE_CODE (arg0) == NE_EXPR
12321 && integer_zerop (TREE_OPERAND (arg0, 1))
12322 && integer_pow2p (arg1)
12323 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12324 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12325 arg1, OEP_ONLY_CONST))
12326 return pedantic_non_lvalue (fold_convert (type,
12327 TREE_OPERAND (arg0, 0)));
12329 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12330 if (integer_zerop (op2)
12331 && truth_value_p (TREE_CODE (arg0))
12332 && truth_value_p (TREE_CODE (arg1)))
12333 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12334 fold_convert (type, arg0),
12335 arg1);
12337 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12338 if (integer_onep (op2)
12339 && truth_value_p (TREE_CODE (arg0))
12340 && truth_value_p (TREE_CODE (arg1)))
12342 /* Only perform transformation if ARG0 is easily inverted. */
12343 tem = fold_truth_not_expr (arg0);
12344 if (tem)
12345 return fold_build2 (TRUTH_ORIF_EXPR, type,
12346 fold_convert (type, tem),
12347 arg1);
12350 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12351 if (integer_zerop (arg1)
12352 && truth_value_p (TREE_CODE (arg0))
12353 && truth_value_p (TREE_CODE (op2)))
12355 /* Only perform transformation if ARG0 is easily inverted. */
12356 tem = fold_truth_not_expr (arg0);
12357 if (tem)
12358 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12359 fold_convert (type, tem),
12360 op2);
12363 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12364 if (integer_onep (arg1)
12365 && truth_value_p (TREE_CODE (arg0))
12366 && truth_value_p (TREE_CODE (op2)))
12367 return fold_build2 (TRUTH_ORIF_EXPR, type,
12368 fold_convert (type, arg0),
12369 op2);
12371 return NULL_TREE;
12373 case CALL_EXPR:
12374 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12375 of fold_ternary on them. */
12376 gcc_unreachable ();
12378 case BIT_FIELD_REF:
12379 if (TREE_CODE (arg0) == VECTOR_CST
12380 && type == TREE_TYPE (TREE_TYPE (arg0))
12381 && host_integerp (arg1, 1)
12382 && host_integerp (op2, 1))
12384 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12385 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12387 if (width != 0
12388 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12389 && (idx % width) == 0
12390 && (idx = idx / width)
12391 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12393 tree elements = TREE_VECTOR_CST_ELTS (arg0);
12394 while (idx-- > 0 && elements)
12395 elements = TREE_CHAIN (elements);
12396 if (elements)
12397 return TREE_VALUE (elements);
12398 else
12399 return fold_convert (type, integer_zero_node);
12402 return NULL_TREE;
12404 default:
12405 return NULL_TREE;
12406 } /* switch (code) */
12409 /* Perform constant folding and related simplification of EXPR.
12410 The related simplifications include x*1 => x, x*0 => 0, etc.,
12411 and application of the associative law.
12412 NOP_EXPR conversions may be removed freely (as long as we
12413 are careful not to change the type of the overall expression).
12414 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12415 but we can constant-fold them if they have constant operands. */
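/* As a small illustration (hypothetical trees, not from this file):

     tree five = fold (build2 (PLUS_EXPR, integer_type_node,
                               build_int_cst (integer_type_node, 2),
                               build_int_cst (integer_type_node, 3)));

   yields the INTEGER_CST 5, while folding a tree for x*1 simply
   returns the tree for x.  */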
12417 #ifdef ENABLE_FOLD_CHECKING
12418 # define fold(x) fold_1 (x)
12419 static tree fold_1 (tree);
12420 static
12421 #endif
12422 tree
12423 fold (tree expr)
12425 const tree t = expr;
12426 enum tree_code code = TREE_CODE (t);
12427 enum tree_code_class kind = TREE_CODE_CLASS (code);
12428 tree tem;
12430 /* Return right away if a constant. */
12431 if (kind == tcc_constant)
12432 return t;
12434 /* CALL_EXPR-like objects with variable numbers of operands are
12435 treated specially. */
12436 if (kind == tcc_vl_exp)
12438 if (code == CALL_EXPR)
12440 tem = fold_call_expr (expr, false);
12441 return tem ? tem : expr;
12443 return expr;
12446 if (IS_EXPR_CODE_CLASS (kind)
12447 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12449 tree type = TREE_TYPE (t);
12450 tree op0, op1, op2;
12452 switch (TREE_CODE_LENGTH (code))
12454 case 1:
12455 op0 = TREE_OPERAND (t, 0);
12456 tem = fold_unary (code, type, op0);
12457 return tem ? tem : expr;
12458 case 2:
12459 op0 = TREE_OPERAND (t, 0);
12460 op1 = TREE_OPERAND (t, 1);
12461 tem = fold_binary (code, type, op0, op1);
12462 return tem ? tem : expr;
12463 case 3:
12464 op0 = TREE_OPERAND (t, 0);
12465 op1 = TREE_OPERAND (t, 1);
12466 op2 = TREE_OPERAND (t, 2);
12467 tem = fold_ternary (code, type, op0, op1, op2);
12468 return tem ? tem : expr;
12469 default:
12470 break;
12474 switch (code)
12476 case CONST_DECL:
12477 return fold (DECL_INITIAL (t));
12479 default:
12480 return t;
12481 } /* switch (code) */
12484 #ifdef ENABLE_FOLD_CHECKING
12485 #undef fold
12487 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12488 static void fold_check_failed (tree, tree);
12489 void print_fold_checksum (tree);
12491 /* When --enable-checking=fold is given, compute a digest of EXPR
12492 before and after the actual fold call, to verify that fold did not
12493 accidentally change the original EXPR. */
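/* In outline, the checking wrapper below does:

     md5 (expr)  -> checksum_before
     ret = fold_1 (expr)
     md5 (expr)  -> checksum_after
     checksum_before != checksum_after  -> internal_error

   i.e. fold_1 must simplify by building new trees rather than by
   mutating its argument in place.  */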
12495 tree
12496 fold (tree expr)
12498 tree ret;
12499 struct md5_ctx ctx;
12500 unsigned char checksum_before[16], checksum_after[16];
12501 htab_t ht;
12503 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12504 md5_init_ctx (&ctx);
12505 fold_checksum_tree (expr, &ctx, ht);
12506 md5_finish_ctx (&ctx, checksum_before);
12507 htab_empty (ht);
12509 ret = fold_1 (expr);
12511 md5_init_ctx (&ctx);
12512 fold_checksum_tree (expr, &ctx, ht);
12513 md5_finish_ctx (&ctx, checksum_after);
12514 htab_delete (ht);
12516 if (memcmp (checksum_before, checksum_after, 16))
12517 fold_check_failed (expr, ret);
12519 return ret;
12522 void
12523 print_fold_checksum (tree expr)
12525 struct md5_ctx ctx;
12526 unsigned char checksum[16], cnt;
12527 htab_t ht;
12529 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12530 md5_init_ctx (&ctx);
12531 fold_checksum_tree (expr, &ctx, ht);
12532 md5_finish_ctx (&ctx, checksum);
12533 htab_delete (ht);
12534 for (cnt = 0; cnt < 16; ++cnt)
12535 fprintf (stderr, "%02x", checksum[cnt]);
12536 putc ('\n', stderr);
12539 static void
12540 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12542 internal_error ("fold check: original tree changed by fold");
12545 static void
12546 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12548 void **slot;
12549 enum tree_code code;
12550 struct tree_function_decl buf;
12551 int i, len;
12553 recursive_label:
12555 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12556 <= sizeof (struct tree_function_decl))
12557 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12558 if (expr == NULL)
12559 return;
12560 slot = htab_find_slot (ht, expr, INSERT);
12561 if (*slot != NULL)
12562 return;
12563 *slot = expr;
12564 code = TREE_CODE (expr);
12565 if (TREE_CODE_CLASS (code) == tcc_declaration
12566 && DECL_ASSEMBLER_NAME_SET_P (expr))
12568 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12569 memcpy ((char *) &buf, expr, tree_size (expr));
12570 expr = (tree) &buf;
12571 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12573 else if (TREE_CODE_CLASS (code) == tcc_type
12574 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12575 || TYPE_CACHED_VALUES_P (expr)
12576 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12578 /* Allow these fields to be modified. */
12579 memcpy ((char *) &buf, expr, tree_size (expr));
12580 expr = (tree) &buf;
12581 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12582 TYPE_POINTER_TO (expr) = NULL;
12583 TYPE_REFERENCE_TO (expr) = NULL;
12584 if (TYPE_CACHED_VALUES_P (expr))
12586 TYPE_CACHED_VALUES_P (expr) = 0;
12587 TYPE_CACHED_VALUES (expr) = NULL;
12590 md5_process_bytes (expr, tree_size (expr), ctx);
12591 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12592 if (TREE_CODE_CLASS (code) != tcc_type
12593 && TREE_CODE_CLASS (code) != tcc_declaration
12594 && code != TREE_LIST)
12595 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12596 switch (TREE_CODE_CLASS (code))
12598 case tcc_constant:
12599 switch (code)
12601 case STRING_CST:
12602 md5_process_bytes (TREE_STRING_POINTER (expr),
12603 TREE_STRING_LENGTH (expr), ctx);
12604 break;
12605 case COMPLEX_CST:
12606 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12607 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12608 break;
12609 case VECTOR_CST:
12610 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12611 break;
12612 default:
12613 break;
12615 break;
12616 case tcc_exceptional:
12617 switch (code)
12619 case TREE_LIST:
12620 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12621 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12622 expr = TREE_CHAIN (expr);
12623 goto recursive_label;
12624 break;
12625 case TREE_VEC:
12626 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12627 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12628 break;
12629 default:
12630 break;
12632 break;
12633 case tcc_expression:
12634 case tcc_reference:
12635 case tcc_comparison:
12636 case tcc_unary:
12637 case tcc_binary:
12638 case tcc_statement:
12639 case tcc_vl_exp:
12640 len = TREE_OPERAND_LENGTH (expr);
12641 for (i = 0; i < len; ++i)
12642 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12643 break;
12644 case tcc_declaration:
12645 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12646 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12647 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12649 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12650 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12651 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12652 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12653 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12655 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12656 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12658 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12660 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12661 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12662 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12664 break;
12665 case tcc_type:
12666 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12667 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12668 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12669 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12670 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12671 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12672 if (INTEGRAL_TYPE_P (expr)
12673 || SCALAR_FLOAT_TYPE_P (expr))
12675 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12676 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12678 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12679 if (TREE_CODE (expr) == RECORD_TYPE
12680 || TREE_CODE (expr) == UNION_TYPE
12681 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12682 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12683 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12684 break;
12685 default:
12686 break;
12690 #endif
12692 /* Fold a unary tree expression with code CODE of type TYPE with an
12693 operand OP0. Return a folded expression if successful. Otherwise,
12694 return a tree expression with code CODE of type TYPE with an
12695 operand OP0. */
12697 tree
12698 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12700 tree tem;
12701 #ifdef ENABLE_FOLD_CHECKING
12702 unsigned char checksum_before[16], checksum_after[16];
12703 struct md5_ctx ctx;
12704 htab_t ht;
12706 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12707 md5_init_ctx (&ctx);
12708 fold_checksum_tree (op0, &ctx, ht);
12709 md5_finish_ctx (&ctx, checksum_before);
12710 htab_empty (ht);
12711 #endif
12713 tem = fold_unary (code, type, op0);
12714 if (!tem)
12715 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12717 #ifdef ENABLE_FOLD_CHECKING
12718 md5_init_ctx (&ctx);
12719 fold_checksum_tree (op0, &ctx, ht);
12720 md5_finish_ctx (&ctx, checksum_after);
12721 htab_delete (ht);
12723 if (memcmp (checksum_before, checksum_after, 16))
12724 fold_check_failed (op0, tem);
12725 #endif
12726 return tem;
12729 /* Fold a binary tree expression with code CODE of type TYPE with
12730 operands OP0 and OP1. Return a folded expression if successful.
12731 Otherwise, return a tree expression with code CODE of type TYPE
12732 with operands OP0 and OP1. */
12734 tree
12735 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12736 MEM_STAT_DECL)
12738 tree tem;
12739 #ifdef ENABLE_FOLD_CHECKING
12740 unsigned char checksum_before_op0[16],
12741 checksum_before_op1[16],
12742 checksum_after_op0[16],
12743 checksum_after_op1[16];
12744 struct md5_ctx ctx;
12745 htab_t ht;
12747 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12748 md5_init_ctx (&ctx);
12749 fold_checksum_tree (op0, &ctx, ht);
12750 md5_finish_ctx (&ctx, checksum_before_op0);
12751 htab_empty (ht);
12753 md5_init_ctx (&ctx);
12754 fold_checksum_tree (op1, &ctx, ht);
12755 md5_finish_ctx (&ctx, checksum_before_op1);
12756 htab_empty (ht);
12757 #endif
12759 tem = fold_binary (code, type, op0, op1);
12760 if (!tem)
12761 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12763 #ifdef ENABLE_FOLD_CHECKING
12764 md5_init_ctx (&ctx);
12765 fold_checksum_tree (op0, &ctx, ht);
12766 md5_finish_ctx (&ctx, checksum_after_op0);
12767 htab_empty (ht);
12769 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12770 fold_check_failed (op0, tem);
12772 md5_init_ctx (&ctx);
12773 fold_checksum_tree (op1, &ctx, ht);
12774 md5_finish_ctx (&ctx, checksum_after_op1);
12775 htab_delete (ht);
12777 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12778 fold_check_failed (op1, tem);
12779 #endif
12780 return tem;
12783 /* Fold a ternary tree expression with code CODE of type TYPE with
12784 operands OP0, OP1, and OP2. Return a folded expression if
12785 successful. Otherwise, return a tree expression with code CODE of
12786 type TYPE with operands OP0, OP1, and OP2. */
12788 tree
12789 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12790 MEM_STAT_DECL)
12792 tree tem;
12793 #ifdef ENABLE_FOLD_CHECKING
12794 unsigned char checksum_before_op0[16],
12795 checksum_before_op1[16],
12796 checksum_before_op2[16],
12797 checksum_after_op0[16],
12798 checksum_after_op1[16],
12799 checksum_after_op2[16];
12800 struct md5_ctx ctx;
12801 htab_t ht;
12803 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12804 md5_init_ctx (&ctx);
12805 fold_checksum_tree (op0, &ctx, ht);
12806 md5_finish_ctx (&ctx, checksum_before_op0);
12807 htab_empty (ht);
12809 md5_init_ctx (&ctx);
12810 fold_checksum_tree (op1, &ctx, ht);
12811 md5_finish_ctx (&ctx, checksum_before_op1);
12812 htab_empty (ht);
12814 md5_init_ctx (&ctx);
12815 fold_checksum_tree (op2, &ctx, ht);
12816 md5_finish_ctx (&ctx, checksum_before_op2);
12817 htab_empty (ht);
12818 #endif
12820 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12821 tem = fold_ternary (code, type, op0, op1, op2);
12822 if (!tem)
12823 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12825 #ifdef ENABLE_FOLD_CHECKING
12826 md5_init_ctx (&ctx);
12827 fold_checksum_tree (op0, &ctx, ht);
12828 md5_finish_ctx (&ctx, checksum_after_op0);
12829 htab_empty (ht);
12831 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12832 fold_check_failed (op0, tem);
12834 md5_init_ctx (&ctx);
12835 fold_checksum_tree (op1, &ctx, ht);
12836 md5_finish_ctx (&ctx, checksum_after_op1);
12837 htab_empty (ht);
12839 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12840 fold_check_failed (op1, tem);
12842 md5_init_ctx (&ctx);
12843 fold_checksum_tree (op2, &ctx, ht);
12844 md5_finish_ctx (&ctx, checksum_after_op2);
12845 htab_delete (ht);
12847 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12848 fold_check_failed (op2, tem);
12849 #endif
12850 return tem;
12853 /* Fold a CALL_EXPR expression of type TYPE with operands FN and ARGLIST
12854 and a null static chain.
12855 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12856 of type TYPE from the given operands as constructed by build_call_list. */
12858 tree
12859 fold_build_call_list (tree type, tree fn, tree arglist)
12861 tree tem;
12862 #ifdef ENABLE_FOLD_CHECKING
12863 unsigned char checksum_before_fn[16],
12864 checksum_before_arglist[16],
12865 checksum_after_fn[16],
12866 checksum_after_arglist[16];
12867 struct md5_ctx ctx;
12868 htab_t ht;
12870 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12871 md5_init_ctx (&ctx);
12872 fold_checksum_tree (fn, &ctx, ht);
12873 md5_finish_ctx (&ctx, checksum_before_fn);
12874 htab_empty (ht);
12876 md5_init_ctx (&ctx);
12877 fold_checksum_tree (arglist, &ctx, ht);
12878 md5_finish_ctx (&ctx, checksum_before_arglist);
12879 htab_empty (ht);
12880 #endif
12882 tem = fold_builtin_call_list (type, fn, arglist);
12884 #ifdef ENABLE_FOLD_CHECKING
12885 md5_init_ctx (&ctx);
12886 fold_checksum_tree (fn, &ctx, ht);
12887 md5_finish_ctx (&ctx, checksum_after_fn);
12888 htab_empty (ht);
12890 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12891 fold_check_failed (fn, tem);
12893 md5_init_ctx (&ctx);
12894 fold_checksum_tree (arglist, &ctx, ht);
12895 md5_finish_ctx (&ctx, checksum_after_arglist);
12896 htab_delete (ht);
12898 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12899 fold_check_failed (arglist, tem);
12900 #endif
12901 return tem;
12904 /* Perform constant folding and related simplification of initializer
12905 expression EXPR. These behave identically to "fold_buildN" but ignore
12906 potential run-time traps and exceptions that fold must preserve. */
12908 #define START_FOLD_INIT \
12909 int saved_signaling_nans = flag_signaling_nans;\
12910 int saved_trapping_math = flag_trapping_math;\
12911 int saved_rounding_math = flag_rounding_math;\
12912 int saved_trapv = flag_trapv;\
12913 int saved_folding_initializer = folding_initializer;\
12914 flag_signaling_nans = 0;\
12915 flag_trapping_math = 0;\
12916 flag_rounding_math = 0;\
12917 flag_trapv = 0;\
12918 folding_initializer = 1;
12920 #define END_FOLD_INIT \
12921 flag_signaling_nans = saved_signaling_nans;\
12922 flag_trapping_math = saved_trapping_math;\
12923 flag_rounding_math = saved_rounding_math;\
12924 flag_trapv = saved_trapv;\
12925 folding_initializer = saved_folding_initializer;
12927 tree
12928 fold_build1_initializer (enum tree_code code, tree type, tree op)
12930 tree result;
12931 START_FOLD_INIT;
12933 result = fold_build1 (code, type, op);
12935 END_FOLD_INIT;
12936 return result;
12939 tree
12940 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12942 tree result;
12943 START_FOLD_INIT;
12945 result = fold_build2 (code, type, op0, op1);
12947 END_FOLD_INIT;
12948 return result;
12951 tree
12952 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12953 tree op2)
12955 tree result;
12956 START_FOLD_INIT;
12958 result = fold_build3 (code, type, op0, op1, op2);
12960 END_FOLD_INIT;
12961 return result;
12964 tree
12965 fold_build_call_list_initializer (tree type, tree fn, tree arglist)
12967 tree result;
12968 START_FOLD_INIT;
12970 result = fold_build_call_list (type, fn, arglist);
12972 END_FOLD_INIT;
12973 return result;
12976 #undef START_FOLD_INIT
12977 #undef END_FOLD_INIT
12979 /* Determine if first argument is a multiple of second argument. Return 0 if
12980 it is not, or we cannot easily determine it to be.
12982 An example of the sort of thing we care about (at this point; this routine
12983 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12984 fold cases do now) is discovering that
12986 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12988 is a multiple of
12990 SAVE_EXPR (J * 8)
12992 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12994 This code also handles discovering that
12996 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12998 is a multiple of 8 so we don't have to worry about dealing with a
12999 possible remainder.
13001 Note that we *look* inside a SAVE_EXPR only to determine how it was
13002 calculated; it is not safe for fold to do much of anything else with the
13003 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13004 at run time. For example, the latter example above *cannot* be implemented
13005 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13006 evaluation time of the original SAVE_EXPR is not necessarily the same at
13007 the time the new expression is evaluated. The only optimization of this
13008 sort that would be valid is changing
13010 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13012 divided by 8 to
13014 SAVE_EXPR (I) * SAVE_EXPR (J)
13016 (where the same SAVE_EXPR (J) is used in the original and the
13017 transformed version). */
13019 static int
13020 multiple_of_p (tree type, tree top, tree bottom)
13022 if (operand_equal_p (top, bottom, 0))
13023 return 1;
13025 if (TREE_CODE (type) != INTEGER_TYPE)
13026 return 0;
13028 switch (TREE_CODE (top))
13030 case BIT_AND_EXPR:
13031 /* Bitwise and provides a power of two multiple. If the mask is
13032 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13033 if (!integer_pow2p (bottom))
13034 return 0;
13035 /* FALLTHRU */
13037 case MULT_EXPR:
13038 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13039 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13041 case PLUS_EXPR:
13042 case MINUS_EXPR:
13043 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13044 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13046 case LSHIFT_EXPR:
13047 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13049 tree op1, t1;
13051 op1 = TREE_OPERAND (top, 1);
13052 /* const_binop may not detect overflow correctly,
13053 so check for it explicitly here. */
13054 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13055 > TREE_INT_CST_LOW (op1)
13056 && TREE_INT_CST_HIGH (op1) == 0
13057 && 0 != (t1 = fold_convert (type,
13058 const_binop (LSHIFT_EXPR,
13059 size_one_node,
13060 op1, 0)))
13061 && !TREE_OVERFLOW (t1))
13062 return multiple_of_p (type, t1, bottom);
13064 return 0;
13066 case NOP_EXPR:
13067 /* Can't handle conversions from non-integral or wider integral type. */
13068 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13069 || (TYPE_PRECISION (type)
13070 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13071 return 0;
13073 /* ... fall through ... */
13075 case SAVE_EXPR:
13076 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13078 case INTEGER_CST:
13079 if (TREE_CODE (bottom) != INTEGER_CST
13080 || (TYPE_UNSIGNED (type)
13081 && (tree_int_cst_sgn (top) < 0
13082 || tree_int_cst_sgn (bottom) < 0)))
13083 return 0;
13084 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13085 top, bottom, 0));
13087 default:
13088 return 0;
13092 /* Return true if `t' is known to be non-negative. If the return
13093 value is based on the assumption that signed overflow is undefined,
13094 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13095 *STRICT_OVERFLOW_P. */
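/* For instance, a value of unsigned type is trivially non-negative,
   so we return true immediately below; ABS_EXPR of a signed integer
   is known non-negative only when signed overflow is undefined,
   since with wrapping overflow ABS_EXPR<INT_MIN> is INT_MIN.  */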
13097 bool
13098 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13100 if (t == error_mark_node)
13101 return false;
13103 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13104 return true;
13106 switch (TREE_CODE (t))
13108 case SSA_NAME:
13109 /* Query VRP to see if it has recorded any information about
13110 the range of this object. */
13111 return ssa_name_nonnegative_p (t);
13113 case ABS_EXPR:
13114 /* We can't return 1 if flag_wrapv is set because
13115 ABS_EXPR<INT_MIN> = INT_MIN. */
13116 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13117 return true;
13118 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13120 *strict_overflow_p = true;
13121 return true;
13123 break;
13125 case INTEGER_CST:
13126 return tree_int_cst_sgn (t) >= 0;
13128 case REAL_CST:
13129 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13131 case PLUS_EXPR:
13132 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13133 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13134 strict_overflow_p)
13135 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13136 strict_overflow_p));
13138 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13139 both unsigned and at least 2 bits shorter than the result. */
13140 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13141 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13142 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13144 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13145 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13146 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13147 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13149 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13150 TYPE_PRECISION (inner2)) + 1;
13151 return prec < TYPE_PRECISION (TREE_TYPE (t));
13154 break;
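/* Worked example for the zero-extension rule above: two 8-bit
   unsigned operands sum to at most 0xFF + 0xFF = 0x1FE, which fits
   in 9 bits, so any result precision of at least 10 bits (8 + 2)
   cannot yield a negative value.  */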
13156 case MULT_EXPR:
13157 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13159 /* x * x for floating point x is always non-negative. */
13160 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13161 return true;
13162 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13163 strict_overflow_p)
13164 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13165 strict_overflow_p));
13168 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13169 both unsigned and the total of their precisions is less than the result's. */
13170 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13171 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13172 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13174 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13175 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13176 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13177 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13178 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13179 < TYPE_PRECISION (TREE_TYPE (t));
13181 return false;
13183 case BIT_AND_EXPR:
13184 case MAX_EXPR:
13185 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13186 strict_overflow_p)
13187 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13188 strict_overflow_p));
13190 case BIT_IOR_EXPR:
13191 case BIT_XOR_EXPR:
13192 case MIN_EXPR:
13193 case RDIV_EXPR:
13194 case TRUNC_DIV_EXPR:
13195 case CEIL_DIV_EXPR:
13196 case FLOOR_DIV_EXPR:
13197 case ROUND_DIV_EXPR:
13198 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13199 strict_overflow_p)
13200 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13201 strict_overflow_p));
13203 case TRUNC_MOD_EXPR:
13204 case CEIL_MOD_EXPR:
13205 case FLOOR_MOD_EXPR:
13206 case ROUND_MOD_EXPR:
13207 case SAVE_EXPR:
13208 case NON_LVALUE_EXPR:
13209 case FLOAT_EXPR:
13210 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13211 strict_overflow_p);
13213 case COMPOUND_EXPR:
13214 case MODIFY_EXPR:
13215 case GIMPLE_MODIFY_STMT:
13216 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13217 strict_overflow_p);
13219 case BIND_EXPR:
13220 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13221 strict_overflow_p);
13223 case COND_EXPR:
13224 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13225 strict_overflow_p)
13226 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13227 strict_overflow_p));
13229 case NOP_EXPR:
13231 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13232 tree outer_type = TREE_TYPE (t);
13234 if (TREE_CODE (outer_type) == REAL_TYPE)
13236 if (TREE_CODE (inner_type) == REAL_TYPE)
13237 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13238 strict_overflow_p);
13239 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13241 if (TYPE_UNSIGNED (inner_type))
13242 return true;
13243 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13244 strict_overflow_p);
13247 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13249 if (TREE_CODE (inner_type) == REAL_TYPE)
13250 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13251 strict_overflow_p);
13252 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13253 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13254 && TYPE_UNSIGNED (inner_type);
13257 break;
13259 case TARGET_EXPR:
13261 tree temp = TARGET_EXPR_SLOT (t);
13262 t = TARGET_EXPR_INITIAL (t);
13264 /* If the initializer is non-void, then it's a normal expression
13265 that will be assigned to the slot. */
13266 if (!VOID_TYPE_P (t))
13267 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13269 /* Otherwise, the initializer sets the slot in some way. One common
13270 way is an assignment statement at the end of the initializer. */
13271 while (1)
13273 if (TREE_CODE (t) == BIND_EXPR)
13274 t = expr_last (BIND_EXPR_BODY (t));
13275 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13276 || TREE_CODE (t) == TRY_CATCH_EXPR)
13277 t = expr_last (TREE_OPERAND (t, 0));
13278 else if (TREE_CODE (t) == STATEMENT_LIST)
13279 t = expr_last (t);
13280 else
13281 break;
13283 if ((TREE_CODE (t) == MODIFY_EXPR
13284 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13285 && GENERIC_TREE_OPERAND (t, 0) == temp)
13286 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13287 strict_overflow_p);
13289 return false;
13292 case CALL_EXPR:
13294 tree fndecl = get_callee_fndecl (t);
13295 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13296 switch (DECL_FUNCTION_CODE (fndecl))
13298 CASE_FLT_FN (BUILT_IN_ACOS):
13299 CASE_FLT_FN (BUILT_IN_ACOSH):
13300 CASE_FLT_FN (BUILT_IN_CABS):
13301 CASE_FLT_FN (BUILT_IN_COSH):
13302 CASE_FLT_FN (BUILT_IN_ERFC):
13303 CASE_FLT_FN (BUILT_IN_EXP):
13304 CASE_FLT_FN (BUILT_IN_EXP10):
13305 CASE_FLT_FN (BUILT_IN_EXP2):
13306 CASE_FLT_FN (BUILT_IN_FABS):
13307 CASE_FLT_FN (BUILT_IN_FDIM):
13308 CASE_FLT_FN (BUILT_IN_HYPOT):
13309 CASE_FLT_FN (BUILT_IN_POW10):
13310 CASE_INT_FN (BUILT_IN_FFS):
13311 CASE_INT_FN (BUILT_IN_PARITY):
13312 CASE_INT_FN (BUILT_IN_POPCOUNT):
13313 case BUILT_IN_BSWAP32:
13314 case BUILT_IN_BSWAP64:
13315 /* Always true. */
13316 return true;
13318 CASE_FLT_FN (BUILT_IN_SQRT):
13319 /* sqrt(-0.0) is -0.0. */
13320 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13321 return true;
13322 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13323 strict_overflow_p);
13325 CASE_FLT_FN (BUILT_IN_ASINH):
13326 CASE_FLT_FN (BUILT_IN_ATAN):
13327 CASE_FLT_FN (BUILT_IN_ATANH):
13328 CASE_FLT_FN (BUILT_IN_CBRT):
13329 CASE_FLT_FN (BUILT_IN_CEIL):
13330 CASE_FLT_FN (BUILT_IN_ERF):
13331 CASE_FLT_FN (BUILT_IN_EXPM1):
13332 CASE_FLT_FN (BUILT_IN_FLOOR):
13333 CASE_FLT_FN (BUILT_IN_FMOD):
13334 CASE_FLT_FN (BUILT_IN_FREXP):
13335 CASE_FLT_FN (BUILT_IN_LCEIL):
13336 CASE_FLT_FN (BUILT_IN_LDEXP):
13337 CASE_FLT_FN (BUILT_IN_LFLOOR):
13338 CASE_FLT_FN (BUILT_IN_LLCEIL):
13339 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13340 CASE_FLT_FN (BUILT_IN_LLRINT):
13341 CASE_FLT_FN (BUILT_IN_LLROUND):
13342 CASE_FLT_FN (BUILT_IN_LRINT):
13343 CASE_FLT_FN (BUILT_IN_LROUND):
13344 CASE_FLT_FN (BUILT_IN_MODF):
13345 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13346 CASE_FLT_FN (BUILT_IN_RINT):
13347 CASE_FLT_FN (BUILT_IN_ROUND):
13348 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13349 CASE_FLT_FN (BUILT_IN_SINH):
13350 CASE_FLT_FN (BUILT_IN_TANH):
13351 CASE_FLT_FN (BUILT_IN_TRUNC):
13352 /* True if the 1st argument is nonnegative. */
13353 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13354 strict_overflow_p);
13356 CASE_FLT_FN (BUILT_IN_FMAX):
13357 /* True if the 1st OR 2nd arguments are nonnegative. */
13358 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13359 strict_overflow_p)
13360 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13361 strict_overflow_p)));
13363 CASE_FLT_FN (BUILT_IN_FMIN):
13364 /* True if the 1st AND 2nd arguments are nonnegative. */
13365 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13366 strict_overflow_p)
13367 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13368 strict_overflow_p)));
13370 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13371 /* True if the 2nd argument is nonnegative. */
13372 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13373 strict_overflow_p);
13375 CASE_FLT_FN (BUILT_IN_POWI):
13376 /* True if the 1st argument is nonnegative or the second
13377 argument is an even integer. */
13378 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13380 tree arg1 = CALL_EXPR_ARG (t, 1);
13381 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13382 return true;
13384 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13385 strict_overflow_p);
13387 CASE_FLT_FN (BUILT_IN_POW):
13388 /* True if the 1st argument is nonnegative or the second
13389 argument is an even integer valued real. */
13390 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13392 REAL_VALUE_TYPE c;
13393 HOST_WIDE_INT n;
13395 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13396 n = real_to_integer (&c);
13397 if ((n & 1) == 0)
13399 REAL_VALUE_TYPE cint;
13400 real_from_integer (&cint, VOIDmode, n,
13401 n < 0 ? -1 : 0, 0);
13402 if (real_identical (&c, &cint))
13403 return true;
13406 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13407 strict_overflow_p);
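/* E.g. pow (x, 2.0) is non-negative for any x because 2.0 is an
   even integer valued real, whereas pow (x, 0.5) is known
   non-negative only when x is.  */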
13409 default:
13410 break;
13414 /* ... fall through ... */
13416 default:
13417 if (truth_value_p (TREE_CODE (t)))
13418 /* Truth values evaluate to 0 or 1, which is nonnegative. */
13419 return true;
13422 /* We don't know the sign of `t', so be conservative and return false. */
13423 return false;
13426 /* Return true if `t' is known to be non-negative. Handle warnings
13427 about undefined signed overflow. */
13429 bool
13430 tree_expr_nonnegative_p (tree t)
13432 bool ret, strict_overflow_p;
13434 strict_overflow_p = false;
13435 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13436 if (strict_overflow_p)
13437 fold_overflow_warning (("assuming signed overflow does not occur when "
13438 "determining that expression is always "
13439 "non-negative"),
13440 WARN_STRICT_OVERFLOW_MISC);
13441 return ret;
13444 /* Return true when T is an address and is known to be nonzero.
13445 For floating point we further ensure that T is not denormal.
13446 Similar logic is present in nonzero_address in rtlanal.h.
13448 If the return value is based on the assumption that signed overflow
13449 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13450 change *STRICT_OVERFLOW_P. */
13452 bool
13453 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13455 tree type = TREE_TYPE (t);
13456 bool sub_strict_overflow_p;
13458 /* Doing something useful for floating point would need more work. */
13459 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13460 return false;
13462 switch (TREE_CODE (t))
13464 case SSA_NAME:
13465 /* Query VRP to see if it has recorded any information about
13466 the range of this object. */
13467 return ssa_name_nonzero_p (t);
13469 case ABS_EXPR:
13470 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13471 strict_overflow_p);
13473 case INTEGER_CST:
13474 return !integer_zerop (t);
13476 case PLUS_EXPR:
13477 if (TYPE_OVERFLOW_UNDEFINED (type))
13479 /* In the presence of negative values it is hard
13480 to say anything definite. */
13481 sub_strict_overflow_p = false;
13482 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13483 &sub_strict_overflow_p)
13484 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13485 &sub_strict_overflow_p))
13486 return false;
13487 /* One of the operands must be positive and the other non-negative. */
13488 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13489 overflows, on a twos-complement machine the sum of two
13490 nonnegative numbers can never be zero. */
13491 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13492 strict_overflow_p)
13493 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13494 strict_overflow_p));
13496 break;
13498 case MULT_EXPR:
13499 if (TYPE_OVERFLOW_UNDEFINED (type))
13501 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13502 strict_overflow_p)
13503 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13504 strict_overflow_p))
13506 *strict_overflow_p = true;
13507 return true;
13510 break;
13512 case NOP_EXPR:
13514 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13515 tree outer_type = TREE_TYPE (t);
13517 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13518 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13519 strict_overflow_p));
13521 break;
13523 case ADDR_EXPR:
13525 tree base = get_base_address (TREE_OPERAND (t, 0));
13527 if (!base)
13528 return false;
13530 /* Weak declarations may link to NULL. */
13531 if (VAR_OR_FUNCTION_DECL_P (base))
13532 return !DECL_WEAK (base);
13534 /* Constants are never weak. */
13535 if (CONSTANT_CLASS_P (base))
13536 return true;
13538 return false;
13541 case COND_EXPR:
13542 sub_strict_overflow_p = false;
13543 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13544 &sub_strict_overflow_p)
13545 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13546 &sub_strict_overflow_p))
13548 if (sub_strict_overflow_p)
13549 *strict_overflow_p = true;
13550 return true;
13552 break;
13554 case MIN_EXPR:
13555 sub_strict_overflow_p = false;
13556 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13557 &sub_strict_overflow_p)
13558 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13559 &sub_strict_overflow_p))
13561 if (sub_strict_overflow_p)
13562 *strict_overflow_p = true;
13564 break;
13566 case MAX_EXPR:
13567 sub_strict_overflow_p = false;
13568 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13569 &sub_strict_overflow_p))
13571 if (sub_strict_overflow_p)
13572 *strict_overflow_p = true;
13574 /* When both operands are nonzero, then MAX must be too. */
13575 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13576 strict_overflow_p))
13577 return true;
13579 /* MAX where operand 0 is positive is positive. */
13580 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13581 strict_overflow_p);
13583 /* MAX where operand 1 is positive is positive. */
13584 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13585 &sub_strict_overflow_p)
13586 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13587 &sub_strict_overflow_p))
13589 if (sub_strict_overflow_p)
13590 *strict_overflow_p = true;
13591 return true;
13593 break;
13595 case COMPOUND_EXPR:
13596 case MODIFY_EXPR:
13597 case GIMPLE_MODIFY_STMT:
13598 case BIND_EXPR:
13599 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13600 strict_overflow_p);
13602 case SAVE_EXPR:
13603 case NON_LVALUE_EXPR:
13604 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13605 strict_overflow_p);
13607 case BIT_IOR_EXPR:
13608 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13609 strict_overflow_p)
13610 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13611 strict_overflow_p));
13613 case CALL_EXPR:
13614 return alloca_call_p (t);
13616 default:
13617 break;
13619 return false;
13622 /* Return true when T is an address and is known to be nonzero.
13623 Handle warnings about undefined signed overflow. */
13625 bool
13626 tree_expr_nonzero_p (tree t)
13628 bool ret, strict_overflow_p;
13630 strict_overflow_p = false;
13631 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13632 if (strict_overflow_p)
13633 fold_overflow_warning (("assuming signed overflow does not occur when "
13634 "determining that expression is always "
13635 "non-zero"),
13636 WARN_STRICT_OVERFLOW_MISC);
13637 return ret;
13640 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13641 attempt to fold the expression to a constant without modifying TYPE,
13642 OP0 or OP1.
13644 If the expression could be simplified to a constant, then return
13645 the constant. If the expression would not be simplified to a
13646 constant, then return NULL_TREE. */
13648 tree
13649 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13651 tree tem = fold_binary (code, type, op0, op1);
13652 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13655 /* Given the components of a unary expression CODE, TYPE and OP0,
13656 attempt to fold the expression to a constant without modifying
13657 TYPE or OP0.
13659 If the expression could be simplified to a constant, then return
13660 the constant. If the expression would not be simplified to a
13661 constant, then return NULL_TREE. */
13663 tree
13664 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13666 tree tem = fold_unary (code, type, op0);
13667 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13670 /* If EXP represents referencing an element in a constant string
13671 (either via pointer arithmetic or array indexing), return the
13672 tree representing the value accessed, otherwise return NULL. */
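/* For example, a read of "hello"[1], whether written as an
   ARRAY_REF or as *("hello" + 1), folds to the character
   constant 'e'.  */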
13674 tree
13675 fold_read_from_constant_string (tree exp)
13677 if ((TREE_CODE (exp) == INDIRECT_REF
13678 || TREE_CODE (exp) == ARRAY_REF)
13679 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13681 tree exp1 = TREE_OPERAND (exp, 0);
13682 tree index;
13683 tree string;
13685 if (TREE_CODE (exp) == INDIRECT_REF)
13686 string = string_constant (exp1, &index);
13687 else
13689 tree low_bound = array_ref_low_bound (exp);
13690 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13692 /* Optimize the special-case of a zero lower bound.
13694 We convert the low_bound to sizetype to avoid some problems
13695 with constant folding. (E.g. suppose the lower bound is 1,
13696 and its mode is QI.  Without the conversion, (ARRAY
13697 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13698 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13699 if (! integer_zerop (low_bound))
13700 index = size_diffop (index, fold_convert (sizetype, low_bound));
13702 string = exp1;
13705 if (string
13706 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13707 && TREE_CODE (string) == STRING_CST
13708 && TREE_CODE (index) == INTEGER_CST
13709 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13710 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13711 == MODE_INT)
13712 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13713 return fold_convert (TREE_TYPE (exp),
13714 build_int_cst (NULL_TREE,
13715 (TREE_STRING_POINTER (string)
13716 [TREE_INT_CST_LOW (index)])));
13718 return NULL;
13721 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13722 an integer constant or real constant.
13724 TYPE is the type of the result. */
13726 static tree
13727 fold_negate_const (tree arg0, tree type)
13729 tree t = NULL_TREE;
13731 switch (TREE_CODE (arg0))
13733 case INTEGER_CST:
13735 unsigned HOST_WIDE_INT low;
13736 HOST_WIDE_INT high;
13737 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13738 TREE_INT_CST_HIGH (arg0),
13739 &low, &high);
13740 t = force_fit_type_double (type, low, high, 1,
13741 (overflow | TREE_OVERFLOW (arg0))
13742 && !TYPE_UNSIGNED (type));
13743 break;
13746 case REAL_CST:
13747 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13748 break;
13750 default:
13751 gcc_unreachable ();
13754 return t;
13757 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13758 an integer constant or real constant.
13760 TYPE is the type of the result. */
13762 tree
13763 fold_abs_const (tree arg0, tree type)
13765 tree t = NULL_TREE;
13767 switch (TREE_CODE (arg0))
13769 case INTEGER_CST:
13770 /* If the value is unsigned, then the absolute value is
13771 the same as the ordinary value. */
13772 if (TYPE_UNSIGNED (type))
13773 t = arg0;
13774 /* Similarly, if the value is non-negative. */
13775 else if (INT_CST_LT (integer_minus_one_node, arg0))
13776 t = arg0;
13777 /* If the value is negative, then the absolute value is
13778 its negation. */
13779 else
13781 unsigned HOST_WIDE_INT low;
13782 HOST_WIDE_INT high;
13783 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13784 TREE_INT_CST_HIGH (arg0),
13785 &low, &high);
13786 t = force_fit_type_double (type, low, high, -1,
13787 overflow | TREE_OVERFLOW (arg0));
13789 break;
13791 case REAL_CST:
13792 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13793 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13794 else
13795 t = arg0;
13796 break;
13798 default:
13799 gcc_unreachable ();
13802 return t;
13805 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13806 constant. TYPE is the type of the result. */
13808 static tree
13809 fold_not_const (tree arg0, tree type)
13811 tree t = NULL_TREE;
13813 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13815 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13816 ~TREE_INT_CST_HIGH (arg0), 0,
13817 TREE_OVERFLOW (arg0));
13819 return t;
13822 /* Given CODE, a relational operator, the target type, TYPE and two
13823 constant operands OP0 and OP1, return the result of the
13824 relational operation. If the result is not a compile time
13825 constant, then return NULL_TREE. */
13827 static tree
13828 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13830 int result, invert;
13832 /* From here on, the only cases we handle are when the result is
13833 known to be a constant. */
13835 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13837 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13838 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13840 /* Handle the cases where either operand is a NaN. */
13841 if (real_isnan (c0) || real_isnan (c1))
13843 switch (code)
13845 case EQ_EXPR:
13846 case ORDERED_EXPR:
13847 result = 0;
13848 break;
13850 case NE_EXPR:
13851 case UNORDERED_EXPR:
13852 case UNLT_EXPR:
13853 case UNLE_EXPR:
13854 case UNGT_EXPR:
13855 case UNGE_EXPR:
13856 case UNEQ_EXPR:
13857 result = 1;
13858 break;
13860 case LT_EXPR:
13861 case LE_EXPR:
13862 case GT_EXPR:
13863 case GE_EXPR:
13864 case LTGT_EXPR:
13865 if (flag_trapping_math)
13866 return NULL_TREE;
13867 result = 0;
13868 break;
13870 default:
13871 gcc_unreachable ();
13874 return constant_boolean_node (result, type);
13877 return constant_boolean_node (real_compare (code, c0, c1), type);
13880 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13882 To compute GT, swap the arguments and do LT.
13883 To compute GE, do LT and invert the result.
13884 To compute LE, swap the arguments, do LT and invert the result.
13885 To compute NE, do EQ and invert the result.
13887 Therefore, the code below must handle only EQ and LT. */
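/* A worked example: 3 <= 5 (LE_EXPR) is first turned into 5 >= 3 by
   swapping the operands, then into 5 < 3 with the invert flag set;
   INT_CST_LT yields false, and inverting gives the correct result,
   true.  */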
13889 if (code == LE_EXPR || code == GT_EXPR)
13891 tree tem = op0;
13892 op0 = op1;
13893 op1 = tem;
13894 code = swap_tree_comparison (code);
13897 /* Note that it is safe to invert for real values here because we
13898 have already handled the one case where it matters. */
13900 invert = 0;
13901 if (code == NE_EXPR || code == GE_EXPR)
13903 invert = 1;
13904 code = invert_tree_comparison (code, false);
13907 /* Compute a result for LT or EQ if args permit;
13908 otherwise return NULL_TREE. */
13909 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13911 if (code == EQ_EXPR)
13912 result = tree_int_cst_equal (op0, op1);
13913 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13914 result = INT_CST_LT_UNSIGNED (op0, op1);
13915 else
13916 result = INT_CST_LT (op0, op1);
13918 else
13919 return NULL_TREE;
13921 if (invert)
13922 result ^= 1;
13923 return constant_boolean_node (result, type);
13926 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
13927 Don't build a CLEANUP_POINT_EXPR for an EXPR which doesn't have side
13928 effects. */
13930 tree
13931 fold_build_cleanup_point_expr (tree type, tree expr)
13933 /* If the expression does not have side effects then we don't have to wrap
13934 it with a cleanup point expression. */
13935 if (!TREE_SIDE_EFFECTS (expr))
13936 return expr;
13938 /* If the expression is a return, check whether the expression inside the
13939 return, or the right-hand side of the modify expression inside the
13940 return, has side effects. If either of them has none, we don't need to
13941 wrap the expression in a cleanup point expression. Note we don't check the
13942 left-hand side of the modify because it should always be a return decl. */
13943 if (TREE_CODE (expr) == RETURN_EXPR)
13945 tree op = TREE_OPERAND (expr, 0);
13946 if (!op || !TREE_SIDE_EFFECTS (op))
13947 return expr;
13948 op = TREE_OPERAND (op, 1);
13949 if (!TREE_SIDE_EFFECTS (op))
13950 return expr;
13953 return build1 (CLEANUP_POINT_EXPR, type, expr);
13956 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13957 avoid confusing the gimplify process. */
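/* For example, building the address of *p yields p itself, possibly
   wrapped in a NOP_EXPR cast to PTRTYPE, rather than an ADDR_EXPR
   around an INDIRECT_REF.  */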
13959 tree
13960 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13962 /* The size of the object is not relevant when talking about its address. */
13963 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13964 t = TREE_OPERAND (t, 0);
13966 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13967 if (TREE_CODE (t) == INDIRECT_REF
13968 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13970 t = TREE_OPERAND (t, 0);
13971 if (TREE_TYPE (t) != ptrtype)
13972 t = build1 (NOP_EXPR, ptrtype, t);
13974 else
13976 tree base = t;
13978 while (handled_component_p (base))
13979 base = TREE_OPERAND (base, 0);
13980 if (DECL_P (base))
13981 TREE_ADDRESSABLE (base) = 1;
13983 t = build1 (ADDR_EXPR, ptrtype, t);
13986 return t;
13989 tree
13990 build_fold_addr_expr (tree t)
13992 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13995 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13996 of an indirection through OP0, or NULL_TREE if no simplification is
13997 possible. */
13999 tree
14000 fold_indirect_ref_1 (tree type, tree op0)
14002 tree sub = op0;
14003 tree subtype;
14005 STRIP_NOPS (sub);
14006 subtype = TREE_TYPE (sub);
14007 if (!POINTER_TYPE_P (subtype))
14008 return NULL_TREE;
14010 if (TREE_CODE (sub) == ADDR_EXPR)
14012 tree op = TREE_OPERAND (sub, 0);
14013 tree optype = TREE_TYPE (op);
14014 /* *&CONST_DECL -> the value of the const decl. */
14015 if (TREE_CODE (op) == CONST_DECL)
14016 return DECL_INITIAL (op);
14017 /* *&p => p; make sure to handle *&"str"[cst] here. */
14018 if (type == optype)
14020 tree fop = fold_read_from_constant_string (op);
14021 if (fop)
14022 return fop;
14023 else
14024 return op;
14026 /* *(foo *)&fooarray => fooarray[0] */
14027 else if (TREE_CODE (optype) == ARRAY_TYPE
14028 && type == TREE_TYPE (optype))
14030 tree type_domain = TYPE_DOMAIN (optype);
14031 tree min_val = size_zero_node;
14032 if (type_domain && TYPE_MIN_VALUE (type_domain))
14033 min_val = TYPE_MIN_VALUE (type_domain);
14034 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14036 /* *(foo *)&complexfoo => __real__ complexfoo */
14037 else if (TREE_CODE (optype) == COMPLEX_TYPE
14038 && type == TREE_TYPE (optype))
14039 return fold_build1 (REALPART_EXPR, type, op);
14040 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14041 else if (TREE_CODE (optype) == VECTOR_TYPE
14042 && type == TREE_TYPE (optype))
14044 tree part_width = TYPE_SIZE (type);
14045 tree index = bitsize_int (0);
14046 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14050 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14051 if (TREE_CODE (sub) == PLUS_EXPR
14052 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14054 tree op00 = TREE_OPERAND (sub, 0);
14055 tree op01 = TREE_OPERAND (sub, 1);
14056 tree op00type;
14058 STRIP_NOPS (op00);
14059 op00type = TREE_TYPE (op00);
14060 if (TREE_CODE (op00) == ADDR_EXPR
14061 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14062 && type == TREE_TYPE (TREE_TYPE (op00type)))
14064 tree size = TYPE_SIZE_UNIT (type);
14065 if (tree_int_cst_equal (size, op01))
14066 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14070 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14071 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14072 && type == TREE_TYPE (TREE_TYPE (subtype)))
14074 tree type_domain;
14075 tree min_val = size_zero_node;
14076 sub = build_fold_indirect_ref (sub);
14077 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14078 if (type_domain && TYPE_MIN_VALUE (type_domain))
14079 min_val = TYPE_MIN_VALUE (type_domain);
14080 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14083 return NULL_TREE;
14086 /* Builds an expression for an indirection through T, simplifying some
14087 cases. */
14089 tree
14090 build_fold_indirect_ref (tree t)
14092 tree type = TREE_TYPE (TREE_TYPE (t));
14093 tree sub = fold_indirect_ref_1 (type, t);
14095 if (sub)
14096 return sub;
14097 else
14098 return build1 (INDIRECT_REF, type, t);
14101 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14103 tree
14104 fold_indirect_ref (tree t)
14106 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14108 if (sub)
14109 return sub;
14110 else
14111 return t;
14114 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14115 whose result is ignored. The type of the returned tree need not be
14116 the same as the original expression. */
14118 tree
14119 fold_ignored_result (tree t)
14121 if (!TREE_SIDE_EFFECTS (t))
14122 return integer_zero_node;
14124 for (;;)
14125 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14127 case tcc_unary:
14128 t = TREE_OPERAND (t, 0);
14129 break;
14131 case tcc_binary:
14132 case tcc_comparison:
14133 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14134 t = TREE_OPERAND (t, 0);
14135 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14136 t = TREE_OPERAND (t, 1);
14137 else
14138 return t;
14139 break;
14141 case tcc_expression:
14142 switch (TREE_CODE (t))
14144 case COMPOUND_EXPR:
14145 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14146 return t;
14147 t = TREE_OPERAND (t, 0);
14148 break;
14150 case COND_EXPR:
14151 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14152 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14153 return t;
14154 t = TREE_OPERAND (t, 0);
14155 break;
14157 default:
14158 return t;
14160 break;
14162 default:
14163 return t;
14167 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14168 This can only be applied to objects of a sizetype. */
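/* For a power-of-two divisor this reduces to bit twiddling; e.g.
   rounding 37 up to a multiple of 8 computes (37 + 7) & -8 = 40.  */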
14170 tree
14171 round_up (tree value, int divisor)
14173 tree div = NULL_TREE;
14175 gcc_assert (divisor > 0);
14176 if (divisor == 1)
14177 return value;
14179 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14180 have to do anything. Only do this check when VALUE is not a
14181 constant, because for a constant the check is more expensive than
14182 simply doing the rounding. */
14183 if (TREE_CODE (value) != INTEGER_CST)
14185 div = build_int_cst (TREE_TYPE (value), divisor);
14187 if (multiple_of_p (TREE_TYPE (value), value, div))
14188 return value;
14191 /* If divisor is a power of two, simplify this to bit manipulation. */
14192 if (divisor == (divisor & -divisor))
14194 if (TREE_CODE (value) == INTEGER_CST)
14196 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14197 unsigned HOST_WIDE_INT high;
14198 bool overflow_p;
14200 if ((low & (divisor - 1)) == 0)
14201 return value;
14203 overflow_p = TREE_OVERFLOW (value);
14204 high = TREE_INT_CST_HIGH (value);
14205 low &= ~(divisor - 1);
14206 low += divisor;
14207 if (low == 0)
14209 high++;
14210 if (high == 0)
14211 overflow_p = true;
14214 return force_fit_type_double (TREE_TYPE (value), low, high,
14215 -1, overflow_p);
14217 else
14219 tree t;
14221 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14222 value = size_binop (PLUS_EXPR, value, t);
14223 t = build_int_cst (TREE_TYPE (value), -divisor);
14224 value = size_binop (BIT_AND_EXPR, value, t);
14227 else
14229 if (!div)
14230 div = build_int_cst (TREE_TYPE (value), divisor);
14231 value = size_binop (CEIL_DIV_EXPR, value, div);
14232 value = size_binop (MULT_EXPR, value, div);
14235 return value;
14238 /* Likewise, but round down. */
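/* E.g. rounding 37 down to a multiple of 8 computes 37 & -8 = 32.  */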
14240 tree
14241 round_down (tree value, int divisor)
14243 tree div = NULL_TREE;
14245 gcc_assert (divisor > 0);
14246 if (divisor == 1)
14247 return value;
14249 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14250 have to do anything. Only do this when we are not given a const,
14251 because in that case, this check is more expensive than just
14252 doing it. */
14253 if (TREE_CODE (value) != INTEGER_CST)
14255 div = build_int_cst (TREE_TYPE (value), divisor);
14257 if (multiple_of_p (TREE_TYPE (value), value, div))
14258 return value;
14261 /* If divisor is a power of two, simplify this to bit manipulation. */
14262 if (divisor == (divisor & -divisor))
14264 tree t;
14266 t = build_int_cst (TREE_TYPE (value), -divisor);
14267 value = size_binop (BIT_AND_EXPR, value, t);
14269 else
14271 if (!div)
14272 div = build_int_cst (TREE_TYPE (value), divisor);
14273 value = size_binop (FLOOR_DIV_EXPR, value, div);
14274 value = size_binop (MULT_EXPR, value, div);
14277 return value;
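
/* Likewise, a sketch (same assumptions as for round_up above): a
   power-of-two divisor is a single mask, round_down (v, 8) == v & -8,
   so round_down (size_int (13), 8) folds to 8, while
   round_down (size_int (13), 6) takes the FLOOR_DIV_EXPR/MULT_EXPR
   path and folds to 12.  */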

/* Return a pointer to the base of the object addressed by EXP, and
   extract the offset of the access: the constant bit offset is stored
   in *PBITPOS and any variable byte offset in *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
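
/* A hedged illustration (assuming BITS_PER_UNIT == 8 and a struct S
   whose field F starts at byte 4, with no variable-offset component):
   splitting &s.f yields the core &s with *PBITPOS == 32 and
   *POFFSET == NULL_TREE, whereas an EXP that is not an ADDR_EXPR,
   e.g. a bare pointer P, comes back unchanged with a zero offset.  */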

/* Returns true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
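
/* A sketch (a is a hypothetical array of 4-byte ints): for e1 == &a[7]
   and e2 == &a[3] both addresses split to the common core &a, so the
   call stores (7 - 3) * 4 == 16 in *DIFF and returns true; addresses
   based on different objects make it return false.  */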

/* Simplify the floating-point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign call and return its first argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
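
/* A sketch of typical results (x and y are hypothetical operands whose
   floating-point mode does not honor sign-dependent rounding):

       fold_strip_sign_ops (-x * y)           ..  yields  x * y
       fold_strip_sign_ops (copysign (x, y))  ..  yields  x
       fold_strip_sign_ops (x + y)            ..  yields  NULL_TREE

   A caller that only cares about the magnitude of the result, e.g.
   when folding fabs (exp), can substitute the stripped form.  */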