2009-07-17 Richard Guenther <rguenther@suse.de>
[official-gcc.git] / gcc / fold-const.c
blob7bf48060cd145f53e272005239b6606b98e505a1
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "target.h"
62 #include "toplev.h"
63 #include "intl.h"
64 #include "ggc.h"
65 #include "hashtab.h"
66 #include "langhooks.h"
67 #include "md5.h"
68 #include "gimple.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
71 otherwise. */
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
78 COMPCODE_FALSE = 0,
79 COMPCODE_LT = 1,
80 COMPCODE_EQ = 2,
81 COMPCODE_LE = 3,
82 COMPCODE_GT = 4,
83 COMPCODE_LTGT = 5,
84 COMPCODE_GE = 6,
85 COMPCODE_ORD = 7,
86 COMPCODE_UNORD = 8,
87 COMPCODE_UNLT = 9,
88 COMPCODE_UNEQ = 10,
89 COMPCODE_UNLE = 11,
90 COMPCODE_UNGT = 12,
91 COMPCODE_NE = 13,
92 COMPCODE_UNGE = 14,
93 COMPCODE_TRUE = 15
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand (tree, tree, tree);
110 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
112 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
115 tree *, tree *);
116 static int all_ones_mask_p (const_tree, int);
117 static tree sign_bit_p (tree, const_tree);
118 static int simple_operand_p (const_tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 extern tree make_range (tree, int *, tree *, tree *, bool *);
123 extern tree build_range_check (tree, tree, int, tree, tree);
124 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
125 tree, tree);
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
137 tree, tree, tree);
138 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
139 static tree fold_div_compare (enum tree_code, tree, tree, tree);
140 static bool reorder_operands_p (const_tree, const_tree);
141 static tree fold_negate_const (tree, tree);
142 static tree fold_not_const (tree, tree);
143 static tree fold_relational_const (enum tree_code, tree, tree, tree);
144 static tree fold_convert_const (enum tree_code, tree, tree);
147 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
148 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
149 and SUM1. Then this yields nonzero if overflow occurred during the
150 addition.
152 Overflow occurs if A and B have the same sign, but A and SUM differ in
153 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
154 sign. */
155 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
157 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
158 We do that by representing the two-word integer in 4 words, with only
159 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
160 number. The value of the word is LOWPART + HIGHPART * BASE. */
162 #define LOWPART(x) \
163 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
164 #define HIGHPART(x) \
165 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
166 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
168 /* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
172 static void
173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
181 /* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
185 static void
186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 HOST_WIDE_INT *hi)
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
193 /* Force the double-word integer L1, H1 to be within the range of the
194 integer type TYPE. Stores the properly truncated and sign-extended
195 double-word integer in *LV, *HV. Returns true if the operation
196 overflows, that is, argument and result are different. */
199 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
200 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
202 unsigned HOST_WIDE_INT low0 = l1;
203 HOST_WIDE_INT high0 = h1;
204 unsigned int prec;
205 int sign_extended_type;
207 if (POINTER_TYPE_P (type)
208 || TREE_CODE (type) == OFFSET_TYPE)
209 prec = POINTER_SIZE;
210 else
211 prec = TYPE_PRECISION (type);
213 /* Size types *are* sign extended. */
214 sign_extended_type = (!TYPE_UNSIGNED (type)
215 || (TREE_CODE (type) == INTEGER_TYPE
216 && TYPE_IS_SIZETYPE (type)));
218 /* First clear all bits that are beyond the type's precision. */
219 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
221 else if (prec > HOST_BITS_PER_WIDE_INT)
222 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
223 else
225 h1 = 0;
226 if (prec < HOST_BITS_PER_WIDE_INT)
227 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
230 /* Then do sign extension if necessary. */
231 if (!sign_extended_type)
232 /* No sign extension */;
233 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 /* Correct width already. */;
235 else if (prec > HOST_BITS_PER_WIDE_INT)
237 /* Sign extend top half? */
238 if (h1 & ((unsigned HOST_WIDE_INT)1
239 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
240 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
242 else if (prec == HOST_BITS_PER_WIDE_INT)
244 if ((HOST_WIDE_INT)l1 < 0)
245 h1 = -1;
247 else
249 /* Sign extend bottom half? */
250 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
252 h1 = -1;
253 l1 |= (HOST_WIDE_INT)(-1) << prec;
257 *lv = l1;
258 *hv = h1;
260 /* If the value didn't fit, signal overflow. */
261 return l1 != low0 || h1 != high0;
264 /* We force the double-int HIGH:LOW to the range of the type TYPE by
265 sign or zero extending it.
266 OVERFLOWABLE indicates if we are interested
267 in overflow of the value, when >0 we are only interested in signed
268 overflow, for <0 we are interested in any overflow. OVERFLOWED
269 indicates whether overflow has already occurred. CONST_OVERFLOWED
270 indicates whether constant overflow has already occurred. We force
271 T's value to be within range of T's type (by setting to 0 or 1 all
272 the bits outside the type's range). We set TREE_OVERFLOWED if,
273 OVERFLOWED is nonzero,
274 or OVERFLOWABLE is >0 and signed overflow occurs
275 or OVERFLOWABLE is <0 and any overflow occurs
276 We return a new tree node for the extended double-int. The node
277 is shared if no overflow flags are set. */
279 tree
280 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
281 HOST_WIDE_INT high, int overflowable,
282 bool overflowed)
284 int sign_extended_type;
285 bool overflow;
287 /* Size types *are* sign extended. */
288 sign_extended_type = (!TYPE_UNSIGNED (type)
289 || (TREE_CODE (type) == INTEGER_TYPE
290 && TYPE_IS_SIZETYPE (type)));
292 overflow = fit_double_type (low, high, &low, &high, type);
294 /* If we need to set overflow flags, return a new unshared node. */
295 if (overflowed || overflow)
297 if (overflowed
298 || overflowable < 0
299 || (overflowable > 0 && sign_extended_type))
301 tree t = make_node (INTEGER_CST);
302 TREE_INT_CST_LOW (t) = low;
303 TREE_INT_CST_HIGH (t) = high;
304 TREE_TYPE (t) = type;
305 TREE_OVERFLOW (t) = 1;
306 return t;
310 /* Else build a shared node. */
311 return build_int_cst_wide (type, low, high);
314 /* Add two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows according to UNSIGNED_P.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
321 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
322 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
323 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
324 bool unsigned_p)
326 unsigned HOST_WIDE_INT l;
327 HOST_WIDE_INT h;
329 l = l1 + l2;
330 h = (HOST_WIDE_INT)((unsigned HOST_WIDE_INT)h1
331 + (unsigned HOST_WIDE_INT)h2
332 + (l < l1));
334 *lv = l;
335 *hv = h;
337 if (unsigned_p)
338 return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
339 || (h == h1
340 && l < l1));
341 else
342 return OVERFLOW_SUM_SIGN (h1, h2, h);
345 /* Negate a doubleword integer with doubleword result.
346 Return nonzero if the operation overflows, assuming it's signed.
347 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
348 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
351 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
352 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
354 if (l1 == 0)
356 *lv = 0;
357 *hv = - h1;
358 return (*hv & h1) < 0;
360 else
362 *lv = -l1;
363 *hv = ~h1;
364 return 0;
368 /* Multiply two doubleword integers with doubleword result.
369 Return nonzero if the operation overflows according to UNSIGNED_P.
370 Each argument is given as two `HOST_WIDE_INT' pieces.
371 One argument is L1 and H1; the other, L2 and H2.
372 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
375 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
376 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
377 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
378 bool unsigned_p)
380 HOST_WIDE_INT arg1[4];
381 HOST_WIDE_INT arg2[4];
382 HOST_WIDE_INT prod[4 * 2];
383 unsigned HOST_WIDE_INT carry;
384 int i, j, k;
385 unsigned HOST_WIDE_INT toplow, neglow;
386 HOST_WIDE_INT tophigh, neghigh;
388 encode (arg1, l1, h1);
389 encode (arg2, l2, h2);
391 memset (prod, 0, sizeof prod);
393 for (i = 0; i < 4; i++)
395 carry = 0;
396 for (j = 0; j < 4; j++)
398 k = i + j;
399 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
400 carry += arg1[i] * arg2[j];
401 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
402 carry += prod[k];
403 prod[k] = LOWPART (carry);
404 carry = HIGHPART (carry);
406 prod[i + 4] = carry;
409 decode (prod, lv, hv);
410 decode (prod + 4, &toplow, &tophigh);
412 /* Unsigned overflow is immediate. */
413 if (unsigned_p)
414 return (toplow | tophigh) != 0;
416 /* Check for signed overflow by calculating the signed representation of the
417 top half of the result; it should agree with the low half's sign bit. */
418 if (h1 < 0)
420 neg_double (l2, h2, &neglow, &neghigh);
421 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
423 if (h2 < 0)
425 neg_double (l1, h1, &neglow, &neghigh);
426 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
428 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
431 /* Shift the doubleword integer in L1, H1 left by COUNT places
432 keeping only PREC bits of result.
433 Shift right if COUNT is negative.
434 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
435 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
437 void
438 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
439 HOST_WIDE_INT count, unsigned int prec,
440 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
442 unsigned HOST_WIDE_INT signmask;
444 if (count < 0)
446 rshift_double (l1, h1, -count, prec, lv, hv, arith);
447 return;
450 if (SHIFT_COUNT_TRUNCATED)
451 count %= prec;
453 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
455 /* Shifting by the host word size is undefined according to the
456 ANSI standard, so we must handle this as a special case. */
457 *hv = 0;
458 *lv = 0;
460 else if (count >= HOST_BITS_PER_WIDE_INT)
462 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
463 *lv = 0;
465 else
467 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
468 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
469 *lv = l1 << count;
472 /* Sign extend all bits that are beyond the precision. */
474 signmask = -((prec > HOST_BITS_PER_WIDE_INT
475 ? ((unsigned HOST_WIDE_INT) *hv
476 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
477 : (*lv >> (prec - 1))) & 1);
479 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
481 else if (prec >= HOST_BITS_PER_WIDE_INT)
483 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
484 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
486 else
488 *hv = signmask;
489 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
490 *lv |= signmask << prec;
494 /* Shift the doubleword integer in L1, H1 right by COUNT places
495 keeping only PREC bits of result. COUNT must be positive.
496 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
497 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
499 void
500 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
501 HOST_WIDE_INT count, unsigned int prec,
502 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
503 int arith)
505 unsigned HOST_WIDE_INT signmask;
507 signmask = (arith
508 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
509 : 0);
511 if (SHIFT_COUNT_TRUNCATED)
512 count %= prec;
514 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
516 /* Shifting by the host word size is undefined according to the
517 ANSI standard, so we must handle this as a special case. */
518 *hv = 0;
519 *lv = 0;
521 else if (count >= HOST_BITS_PER_WIDE_INT)
523 *hv = 0;
524 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
526 else
528 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
529 *lv = ((l1 >> count)
530 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
533 /* Zero / sign extend all bits that are beyond the precision. */
535 if (count >= (HOST_WIDE_INT)prec)
537 *hv = signmask;
538 *lv = signmask;
540 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
542 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
544 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
545 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
547 else
549 *hv = signmask;
550 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
551 *lv |= signmask << (prec - count);
555 /* Rotate the doubleword integer in L1, H1 left by COUNT places
556 keeping only PREC bits of result.
557 Rotate right if COUNT is negative.
558 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
560 void
561 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
562 HOST_WIDE_INT count, unsigned int prec,
563 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
565 unsigned HOST_WIDE_INT s1l, s2l;
566 HOST_WIDE_INT s1h, s2h;
568 count %= prec;
569 if (count < 0)
570 count += prec;
572 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
573 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
574 *lv = s1l | s2l;
575 *hv = s1h | s2h;
578 /* Rotate the doubleword integer in L1, H1 left by COUNT places
579 keeping only PREC bits of result. COUNT must be positive.
580 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
582 void
583 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
584 HOST_WIDE_INT count, unsigned int prec,
585 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
587 unsigned HOST_WIDE_INT s1l, s2l;
588 HOST_WIDE_INT s1h, s2h;
590 count %= prec;
591 if (count < 0)
592 count += prec;
594 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
595 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
596 *lv = s1l | s2l;
597 *hv = s1h | s2h;
600 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
601 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
602 CODE is a tree code for a kind of division, one of
603 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
604 or EXACT_DIV_EXPR
605 It controls how the quotient is rounded to an integer.
606 Return nonzero if the operation overflows.
607 UNS nonzero says do unsigned division. */
610 div_and_round_double (enum tree_code code, int uns,
611 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
612 HOST_WIDE_INT hnum_orig,
613 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
614 HOST_WIDE_INT hden_orig,
615 unsigned HOST_WIDE_INT *lquo,
616 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
617 HOST_WIDE_INT *hrem)
619 int quo_neg = 0;
620 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
621 HOST_WIDE_INT den[4], quo[4];
622 int i, j;
623 unsigned HOST_WIDE_INT work;
624 unsigned HOST_WIDE_INT carry = 0;
625 unsigned HOST_WIDE_INT lnum = lnum_orig;
626 HOST_WIDE_INT hnum = hnum_orig;
627 unsigned HOST_WIDE_INT lden = lden_orig;
628 HOST_WIDE_INT hden = hden_orig;
629 int overflow = 0;
631 if (hden == 0 && lden == 0)
632 overflow = 1, lden = 1;
634 /* Calculate quotient sign and convert operands to unsigned. */
635 if (!uns)
637 if (hnum < 0)
639 quo_neg = ~ quo_neg;
640 /* (minimum integer) / (-1) is the only overflow case. */
641 if (neg_double (lnum, hnum, &lnum, &hnum)
642 && ((HOST_WIDE_INT) lden & hden) == -1)
643 overflow = 1;
645 if (hden < 0)
647 quo_neg = ~ quo_neg;
648 neg_double (lden, hden, &lden, &hden);
652 if (hnum == 0 && hden == 0)
653 { /* single precision */
654 *hquo = *hrem = 0;
655 /* This unsigned division rounds toward zero. */
656 *lquo = lnum / lden;
657 goto finish_up;
660 if (hnum == 0)
661 { /* trivial case: dividend < divisor */
662 /* hden != 0 already checked. */
663 *hquo = *lquo = 0;
664 *hrem = hnum;
665 *lrem = lnum;
666 goto finish_up;
669 memset (quo, 0, sizeof quo);
671 memset (num, 0, sizeof num); /* to zero 9th element */
672 memset (den, 0, sizeof den);
674 encode (num, lnum, hnum);
675 encode (den, lden, hden);
677 /* Special code for when the divisor < BASE. */
678 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
680 /* hnum != 0 already checked. */
681 for (i = 4 - 1; i >= 0; i--)
683 work = num[i] + carry * BASE;
684 quo[i] = work / lden;
685 carry = work % lden;
688 else
690 /* Full double precision division,
691 with thanks to Don Knuth's "Seminumerical Algorithms". */
692 int num_hi_sig, den_hi_sig;
693 unsigned HOST_WIDE_INT quo_est, scale;
695 /* Find the highest nonzero divisor digit. */
696 for (i = 4 - 1;; i--)
697 if (den[i] != 0)
699 den_hi_sig = i;
700 break;
703 /* Insure that the first digit of the divisor is at least BASE/2.
704 This is required by the quotient digit estimation algorithm. */
706 scale = BASE / (den[den_hi_sig] + 1);
707 if (scale > 1)
708 { /* scale divisor and dividend */
709 carry = 0;
710 for (i = 0; i <= 4 - 1; i++)
712 work = (num[i] * scale) + carry;
713 num[i] = LOWPART (work);
714 carry = HIGHPART (work);
717 num[4] = carry;
718 carry = 0;
719 for (i = 0; i <= 4 - 1; i++)
721 work = (den[i] * scale) + carry;
722 den[i] = LOWPART (work);
723 carry = HIGHPART (work);
724 if (den[i] != 0) den_hi_sig = i;
728 num_hi_sig = 4;
730 /* Main loop */
731 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
733 /* Guess the next quotient digit, quo_est, by dividing the first
734 two remaining dividend digits by the high order quotient digit.
735 quo_est is never low and is at most 2 high. */
736 unsigned HOST_WIDE_INT tmp;
738 num_hi_sig = i + den_hi_sig + 1;
739 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
740 if (num[num_hi_sig] != den[den_hi_sig])
741 quo_est = work / den[den_hi_sig];
742 else
743 quo_est = BASE - 1;
745 /* Refine quo_est so it's usually correct, and at most one high. */
746 tmp = work - quo_est * den[den_hi_sig];
747 if (tmp < BASE
748 && (den[den_hi_sig - 1] * quo_est
749 > (tmp * BASE + num[num_hi_sig - 2])))
750 quo_est--;
752 /* Try QUO_EST as the quotient digit, by multiplying the
753 divisor by QUO_EST and subtracting from the remaining dividend.
754 Keep in mind that QUO_EST is the I - 1st digit. */
756 carry = 0;
757 for (j = 0; j <= den_hi_sig; j++)
759 work = quo_est * den[j] + carry;
760 carry = HIGHPART (work);
761 work = num[i + j] - LOWPART (work);
762 num[i + j] = LOWPART (work);
763 carry += HIGHPART (work) != 0;
766 /* If quo_est was high by one, then num[i] went negative and
767 we need to correct things. */
768 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
770 quo_est--;
771 carry = 0; /* add divisor back in */
772 for (j = 0; j <= den_hi_sig; j++)
774 work = num[i + j] + den[j] + carry;
775 carry = HIGHPART (work);
776 num[i + j] = LOWPART (work);
779 num [num_hi_sig] += carry;
782 /* Store the quotient digit. */
783 quo[i] = quo_est;
787 decode (quo, lquo, hquo);
789 finish_up:
790 /* If result is negative, make it so. */
791 if (quo_neg)
792 neg_double (*lquo, *hquo, lquo, hquo);
794 /* Compute trial remainder: rem = num - (quo * den) */
795 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
796 neg_double (*lrem, *hrem, lrem, hrem);
797 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
799 switch (code)
801 case TRUNC_DIV_EXPR:
802 case TRUNC_MOD_EXPR: /* round toward zero */
803 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
804 return overflow;
806 case FLOOR_DIV_EXPR:
807 case FLOOR_MOD_EXPR: /* round toward negative infinity */
808 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
810 /* quo = quo - 1; */
811 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
812 lquo, hquo);
814 else
815 return overflow;
816 break;
818 case CEIL_DIV_EXPR:
819 case CEIL_MOD_EXPR: /* round toward positive infinity */
820 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
822 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
823 lquo, hquo);
825 else
826 return overflow;
827 break;
829 case ROUND_DIV_EXPR:
830 case ROUND_MOD_EXPR: /* round to closest integer */
832 unsigned HOST_WIDE_INT labs_rem = *lrem;
833 HOST_WIDE_INT habs_rem = *hrem;
834 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
835 HOST_WIDE_INT habs_den = hden, htwice;
837 /* Get absolute values. */
838 if (*hrem < 0)
839 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
840 if (hden < 0)
841 neg_double (lden, hden, &labs_den, &habs_den);
843 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
844 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
845 labs_rem, habs_rem, &ltwice, &htwice);
847 if (((unsigned HOST_WIDE_INT) habs_den
848 < (unsigned HOST_WIDE_INT) htwice)
849 || (((unsigned HOST_WIDE_INT) habs_den
850 == (unsigned HOST_WIDE_INT) htwice)
851 && (labs_den <= ltwice)))
853 if (*hquo < 0)
854 /* quo = quo - 1; */
855 add_double (*lquo, *hquo,
856 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
857 else
858 /* quo = quo + 1; */
859 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
860 lquo, hquo);
862 else
863 return overflow;
865 break;
867 default:
868 gcc_unreachable ();
871 /* Compute true remainder: rem = num - (quo * den) */
872 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
873 neg_double (*lrem, *hrem, lrem, hrem);
874 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
875 return overflow;
878 /* If ARG2 divides ARG1 with zero remainder, carries out the division
879 of type CODE and returns the quotient.
880 Otherwise returns NULL_TREE. */
882 tree
883 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
885 unsigned HOST_WIDE_INT int1l, int2l;
886 HOST_WIDE_INT int1h, int2h;
887 unsigned HOST_WIDE_INT quol, reml;
888 HOST_WIDE_INT quoh, remh;
889 tree type = TREE_TYPE (arg1);
890 int uns = TYPE_UNSIGNED (type);
892 int1l = TREE_INT_CST_LOW (arg1);
893 int1h = TREE_INT_CST_HIGH (arg1);
894 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
895 &obj[some_exotic_number]. */
896 if (POINTER_TYPE_P (type))
898 uns = false;
899 type = signed_type_for (type);
900 fit_double_type (int1l, int1h, &int1l, &int1h,
901 type);
903 else
904 fit_double_type (int1l, int1h, &int1l, &int1h, type);
905 int2l = TREE_INT_CST_LOW (arg2);
906 int2h = TREE_INT_CST_HIGH (arg2);
908 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
909 &quol, &quoh, &reml, &remh);
910 if (remh != 0 || reml != 0)
911 return NULL_TREE;
913 return build_int_cst_wide (type, quol, quoh);
916 /* This is nonzero if we should defer warnings about undefined
917 overflow. This facility exists because these warnings are a
918 special case. The code to estimate loop iterations does not want
919 to issue any warnings, since it works with expressions which do not
920 occur in user code. Various bits of cleanup code call fold(), but
921 only use the result if it has certain characteristics (e.g., is a
922 constant); that code only wants to issue a warning if the result is
923 used. */
925 static int fold_deferring_overflow_warnings;
927 /* If a warning about undefined overflow is deferred, this is the
928 warning. Note that this may cause us to turn two warnings into
929 one, but that is fine since it is sufficient to only give one
930 warning per expression. */
932 static const char* fold_deferred_overflow_warning;
934 /* If a warning about undefined overflow is deferred, this is the
935 level at which the warning should be emitted. */
937 static enum warn_strict_overflow_code fold_deferred_overflow_code;
939 /* Start deferring overflow warnings. We could use a stack here to
940 permit nested calls, but at present it is not necessary. */
942 void
943 fold_defer_overflow_warnings (void)
945 ++fold_deferring_overflow_warnings;
948 /* Stop deferring overflow warnings. If there is a pending warning,
949 and ISSUE is true, then issue the warning if appropriate. STMT is
950 the statement with which the warning should be associated (used for
951 location information); STMT may be NULL. CODE is the level of the
952 warning--a warn_strict_overflow_code value. This function will use
953 the smaller of CODE and the deferred code when deciding whether to
954 issue the warning. CODE may be zero to mean to always use the
955 deferred code. */
957 void
958 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
960 const char *warnmsg;
961 location_t locus;
963 gcc_assert (fold_deferring_overflow_warnings > 0);
964 --fold_deferring_overflow_warnings;
965 if (fold_deferring_overflow_warnings > 0)
967 if (fold_deferred_overflow_warning != NULL
968 && code != 0
969 && code < (int) fold_deferred_overflow_code)
970 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
971 return;
974 warnmsg = fold_deferred_overflow_warning;
975 fold_deferred_overflow_warning = NULL;
977 if (!issue || warnmsg == NULL)
978 return;
980 if (gimple_no_warning_p (stmt))
981 return;
983 /* Use the smallest code level when deciding to issue the
984 warning. */
985 if (code == 0 || code > (int) fold_deferred_overflow_code)
986 code = fold_deferred_overflow_code;
988 if (!issue_strict_overflow_warning (code))
989 return;
991 if (stmt == NULL)
992 locus = input_location;
993 else
994 locus = gimple_location (stmt);
995 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
998 /* Stop deferring overflow warnings, ignoring any deferred
999 warnings. */
1001 void
1002 fold_undefer_and_ignore_overflow_warnings (void)
1004 fold_undefer_overflow_warnings (false, NULL, 0);
1007 /* Whether we are deferring overflow warnings. */
1009 bool
1010 fold_deferring_overflow_warnings_p (void)
1012 return fold_deferring_overflow_warnings > 0;
1015 /* This is called when we fold something based on the fact that signed
1016 overflow is undefined. */
1018 static void
1019 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1021 if (fold_deferring_overflow_warnings > 0)
1023 if (fold_deferred_overflow_warning == NULL
1024 || wc < fold_deferred_overflow_code)
1026 fold_deferred_overflow_warning = gmsgid;
1027 fold_deferred_overflow_code = wc;
1030 else if (issue_strict_overflow_warning (wc))
1031 warning (OPT_Wstrict_overflow, gmsgid);
1034 /* Return true if the built-in mathematical function specified by CODE
1035 is odd, i.e. -f(x) == f(-x). */
1037 static bool
1038 negate_mathfn_p (enum built_in_function code)
1040 switch (code)
1042 CASE_FLT_FN (BUILT_IN_ASIN):
1043 CASE_FLT_FN (BUILT_IN_ASINH):
1044 CASE_FLT_FN (BUILT_IN_ATAN):
1045 CASE_FLT_FN (BUILT_IN_ATANH):
1046 CASE_FLT_FN (BUILT_IN_CASIN):
1047 CASE_FLT_FN (BUILT_IN_CASINH):
1048 CASE_FLT_FN (BUILT_IN_CATAN):
1049 CASE_FLT_FN (BUILT_IN_CATANH):
1050 CASE_FLT_FN (BUILT_IN_CBRT):
1051 CASE_FLT_FN (BUILT_IN_CPROJ):
1052 CASE_FLT_FN (BUILT_IN_CSIN):
1053 CASE_FLT_FN (BUILT_IN_CSINH):
1054 CASE_FLT_FN (BUILT_IN_CTAN):
1055 CASE_FLT_FN (BUILT_IN_CTANH):
1056 CASE_FLT_FN (BUILT_IN_ERF):
1057 CASE_FLT_FN (BUILT_IN_LLROUND):
1058 CASE_FLT_FN (BUILT_IN_LROUND):
1059 CASE_FLT_FN (BUILT_IN_ROUND):
1060 CASE_FLT_FN (BUILT_IN_SIN):
1061 CASE_FLT_FN (BUILT_IN_SINH):
1062 CASE_FLT_FN (BUILT_IN_TAN):
1063 CASE_FLT_FN (BUILT_IN_TANH):
1064 CASE_FLT_FN (BUILT_IN_TRUNC):
1065 return true;
1067 CASE_FLT_FN (BUILT_IN_LLRINT):
1068 CASE_FLT_FN (BUILT_IN_LRINT):
1069 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1070 CASE_FLT_FN (BUILT_IN_RINT):
1071 return !flag_rounding_math;
1073 default:
1074 break;
1076 return false;
1079 /* Check whether we may negate an integer constant T without causing
1080 overflow. */
1082 bool
1083 may_negate_without_overflow_p (const_tree t)
1085 unsigned HOST_WIDE_INT val;
1086 unsigned int prec;
1087 tree type;
1089 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1091 type = TREE_TYPE (t);
1092 if (TYPE_UNSIGNED (type))
1093 return integer_zerop (t);
1095 prec = TYPE_PRECISION (type);
1096 if (prec > HOST_BITS_PER_WIDE_INT)
1098 if (TREE_INT_CST_LOW (t) != 0)
1099 return true;
1100 prec -= HOST_BITS_PER_WIDE_INT;
1101 val = TREE_INT_CST_HIGH (t);
1103 else
1104 val = TREE_INT_CST_LOW (t);
1105 if (prec < HOST_BITS_PER_WIDE_INT)
1106 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1107 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1110 /* Determine whether an expression T can be cheaply negated using
1111 the function negate_expr without introducing undefined overflow. */
1113 static bool
1114 negate_expr_p (tree t)
1116 tree type;
1118 if (t == 0)
1119 return false;
1121 type = TREE_TYPE (t);
1123 STRIP_SIGN_NOPS (t);
1124 switch (TREE_CODE (t))
1126 case INTEGER_CST:
1127 if (TYPE_OVERFLOW_TRAPS (type))
1128 return may_negate_without_overflow_p (t);
1129 return true;
1131 case BIT_NOT_EXPR:
1132 return INTEGRAL_TYPE_P (type);
1134 case FIXED_CST:
1135 case REAL_CST:
1136 case NEGATE_EXPR:
1137 case NEGATENV_EXPR:
1138 return true;
1140 case COMPLEX_CST:
1141 return negate_expr_p (TREE_REALPART (t))
1142 && negate_expr_p (TREE_IMAGPART (t));
1144 case COMPLEX_EXPR:
1145 return negate_expr_p (TREE_OPERAND (t, 0))
1146 && negate_expr_p (TREE_OPERAND (t, 1));
1148 case CONJ_EXPR:
1149 return negate_expr_p (TREE_OPERAND (t, 0));
1151 case PLUS_EXPR:
1152 case PLUSNV_EXPR:
1153 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1154 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1155 return false;
1156 /* -(A + B) -> (-B) - A. */
1157 if (negate_expr_p (TREE_OPERAND (t, 1))
1158 && reorder_operands_p (TREE_OPERAND (t, 0),
1159 TREE_OPERAND (t, 1)))
1160 return true;
1161 /* -(A + B) -> (-A) - B. */
1162 return negate_expr_p (TREE_OPERAND (t, 0));
1164 case MINUS_EXPR:
1165 case MINUSNV_EXPR:
1166 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1167 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1168 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1169 && reorder_operands_p (TREE_OPERAND (t, 0),
1170 TREE_OPERAND (t, 1));
1172 case MULT_EXPR:
1173 case MULTNV_EXPR:
1174 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1175 break;
1177 /* Fall through. */
1179 case RDIV_EXPR:
1180 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1181 return negate_expr_p (TREE_OPERAND (t, 1))
1182 || negate_expr_p (TREE_OPERAND (t, 0));
1183 break;
1185 case TRUNC_DIV_EXPR:
1186 case ROUND_DIV_EXPR:
1187 case FLOOR_DIV_EXPR:
1188 case CEIL_DIV_EXPR:
1189 case EXACT_DIV_EXPR:
1190 /* In general we can't negate A / B, because if A is INT_MIN and
1191 B is 1, we may turn this into INT_MIN / -1 which is undefined
1192 and actually traps on some architectures. */
1193 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
1194 break;
1195 return negate_expr_p (TREE_OPERAND (t, 1))
1196 || negate_expr_p (TREE_OPERAND (t, 0));
1198 CASE_CONVERT:
1199 /* Negate -((double)float) as (double)(-float). */
1200 if (TREE_CODE (type) == REAL_TYPE)
1202 tree tem = strip_float_extensions (t);
1203 if (tem != t)
1204 return negate_expr_p (tem);
1206 break;
1208 case CALL_EXPR:
1209 /* Negate -f(x) as f(-x). */
1210 if (negate_mathfn_p (builtin_mathfn_code (t)))
1211 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1212 break;
1214 case RSHIFT_EXPR:
1215 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1216 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1218 tree op1 = TREE_OPERAND (t, 1);
1219 if (TREE_INT_CST_HIGH (op1) == 0
1220 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1221 == TREE_INT_CST_LOW (op1))
1222 return true;
1224 break;
1226 default:
1227 break;
1229 return false;
1232 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1233 simplification is possible.
1234 If negate_expr_p would return true for T, NULL_TREE will never be
1235 returned. */
1237 static tree
1238 fold_negate_expr (tree t)
1240 tree type = TREE_TYPE (t);
1241 tree tem;
1243 switch (TREE_CODE (t))
1245 /* Convert - (~A) to A + 1. */
1246 case BIT_NOT_EXPR:
1247 if (INTEGRAL_TYPE_P (type))
1248 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1249 build_int_cst (type, 1));
1250 break;
1252 case INTEGER_CST:
1253 tem = fold_negate_const (t, type);
1254 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1255 || !TYPE_OVERFLOW_TRAPS (type))
1256 return tem;
1257 break;
1259 case REAL_CST:
1260 tem = fold_negate_const (t, type);
1261 /* Two's complement FP formats, such as c4x, may overflow. */
1262 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1263 return tem;
1264 break;
1266 case FIXED_CST:
1267 tem = fold_negate_const (t, type);
1268 return tem;
1270 case COMPLEX_CST:
1272 tree rpart = negate_expr (TREE_REALPART (t));
1273 tree ipart = negate_expr (TREE_IMAGPART (t));
1275 if ((TREE_CODE (rpart) == REAL_CST
1276 && TREE_CODE (ipart) == REAL_CST)
1277 || (TREE_CODE (rpart) == INTEGER_CST
1278 && TREE_CODE (ipart) == INTEGER_CST))
1279 return build_complex (type, rpart, ipart);
1281 break;
1283 case COMPLEX_EXPR:
1284 if (negate_expr_p (t))
1285 return fold_build2 (COMPLEX_EXPR, type,
1286 fold_negate_expr (TREE_OPERAND (t, 0)),
1287 fold_negate_expr (TREE_OPERAND (t, 1)));
1288 break;
1290 case CONJ_EXPR:
1291 if (negate_expr_p (t))
1292 return fold_build1 (CONJ_EXPR, type,
1293 fold_negate_expr (TREE_OPERAND (t, 0)));
1294 break;
1296 case NEGATE_EXPR:
1297 case NEGATENV_EXPR:
1298 return TREE_OPERAND (t, 0);
1300 case PLUS_EXPR:
1301 case PLUSNV_EXPR:
1302 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1303 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1305 /* -(A + B) -> (-B) - A. */
1306 if (negate_expr_p (TREE_OPERAND (t, 1))
1307 && reorder_operands_p (TREE_OPERAND (t, 0),
1308 TREE_OPERAND (t, 1)))
1310 tem = negate_expr (TREE_OPERAND (t, 1));
1311 return fold_build2 (MINUS_EXPR, type,
1312 tem, TREE_OPERAND (t, 0));
1315 /* -(A + B) -> (-A) - B. */
1316 if (negate_expr_p (TREE_OPERAND (t, 0)))
1318 tem = negate_expr (TREE_OPERAND (t, 0));
1319 return fold_build2 (MINUS_EXPR, type,
1320 tem, TREE_OPERAND (t, 1));
1323 break;
1325 case MINUS_EXPR:
1326 case MINUSNV_EXPR:
1327 /* - (A - B) -> B - A */
1328 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1329 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1330 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1331 return fold_build2 (MINUS_EXPR, type,
1332 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1333 break;
1335 case MULT_EXPR:
1336 case MULTNV_EXPR:
1337 if (TYPE_UNSIGNED (type))
1338 break;
1340 /* Fall through. */
1342 case RDIV_EXPR:
1343 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1345 tem = TREE_OPERAND (t, 1);
1346 if (negate_expr_p (tem))
1347 return fold_build2 (TREE_CODE (t), type,
1348 TREE_OPERAND (t, 0), negate_expr (tem));
1349 tem = TREE_OPERAND (t, 0);
1350 if (negate_expr_p (tem))
1351 return fold_build2 (TREE_CODE (t), type,
1352 negate_expr (tem), TREE_OPERAND (t, 1));
1354 break;
1356 case TRUNC_DIV_EXPR:
1357 case ROUND_DIV_EXPR:
1358 case FLOOR_DIV_EXPR:
1359 case CEIL_DIV_EXPR:
1360 case EXACT_DIV_EXPR:
1361 /* In general we can't negate A / B, because if A is INT_MIN and
1362 B is 1, we may turn this into INT_MIN / -1 which is undefined
1363 and actually traps on some architectures. */
1364 if (!INTEGRAL_TYPE_P (type))
1366 tem = TREE_OPERAND (t, 1);
1367 if (negate_expr_p (tem))
1368 return fold_build2 (TREE_CODE (t), type,
1369 TREE_OPERAND (t, 0), negate_expr (tem));
1370 tem = TREE_OPERAND (t, 0);
1371 if (negate_expr_p (tem))
1372 return fold_build2 (TREE_CODE (t), type,
1373 negate_expr (tem), TREE_OPERAND (t, 1));
1375 break;
1377 CASE_CONVERT:
1378 /* Convert -((double)float) into (double)(-float). */
1379 if (TREE_CODE (type) == REAL_TYPE)
1381 tem = strip_float_extensions (t);
1382 if (tem != t && negate_expr_p (tem))
1383 return fold_convert (type, negate_expr (tem));
1385 break;
1387 case CALL_EXPR:
1388 /* Negate -f(x) as f(-x). */
1389 if (negate_mathfn_p (builtin_mathfn_code (t))
1390 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1392 tree fndecl, arg;
1394 fndecl = get_callee_fndecl (t);
1395 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1396 return build_call_expr (fndecl, 1, arg);
1398 break;
1400 case RSHIFT_EXPR:
1401 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1402 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1404 tree op1 = TREE_OPERAND (t, 1);
1405 if (TREE_INT_CST_HIGH (op1) == 0
1406 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1407 == TREE_INT_CST_LOW (op1))
1409 tree ntype = TYPE_UNSIGNED (type)
1410 ? signed_type_for (type)
1411 : unsigned_type_for (type);
1412 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1413 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1414 return fold_convert (type, temp);
1417 break;
1419 default:
1420 break;
1423 return NULL_TREE;
1426 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1427 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1428 return NULL_TREE. */
1430 static tree
1431 negate_expr (tree t)
1433 tree type, tem;
1435 if (t == NULL_TREE)
1436 return NULL_TREE;
1438 type = TREE_TYPE (t);
1439 STRIP_SIGN_NOPS (t);
1441 tem = fold_negate_expr (t);
1442 if (!tem)
1443 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1444 return fold_convert (type, tem);
1447 /* Split a tree IN into a constant, literal and variable parts that could be
1448 combined with CODE to make IN. "constant" means an expression with
1449 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1450 commutative arithmetic operation. Store the constant part into *CONP,
1451 the literal in *LITP and return the variable part. If a part isn't
1452 present, set it to null. If the tree does not decompose in this way,
1453 return the entire tree as the variable part and the other parts as null.
1455 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1456 case, we negate an operand that was subtracted. Except if it is a
1457 literal for which we use *MINUS_LITP instead.
1459 If NEGATE_P is true, we are negating all of IN, again except a literal
1460 for which we use *MINUS_LITP instead.
1462 If IN is itself a literal or constant, return it as appropriate.
1464 Note that we do not guarantee that any of the three values will be the
1465 same type as IN, but they will have the same signedness and mode. */
1467 static tree
1468 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1469 tree *minus_litp, int negate_p)
1471 tree var = 0;
1473 *conp = 0;
1474 *litp = 0;
1475 *minus_litp = 0;
1477 /* Strip any conversions that don't change the machine mode or signedness. */
1478 STRIP_SIGN_NOPS (in);
1480 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1481 || TREE_CODE (in) == FIXED_CST)
1482 *litp = in;
1483 else if (TREE_CODE (in) == code
1484 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1485 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1486 /* We can associate addition and subtraction together (even
1487 though the C standard doesn't say so) for integers because
1488 the value is not affected. For reals, the value might be
1489 affected, so we can't. */
1490 && ((PLUS_EXPR_CODE_P (code) && MINUS_EXPR_P (in))
1491 || (MINUS_EXPR_CODE_P (code) && PLUS_EXPR_P (in)))))
1493 tree op0 = TREE_OPERAND (in, 0);
1494 tree op1 = TREE_OPERAND (in, 1);
1495 int neg1_p = MINUS_EXPR_P (in);
1496 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1498 /* First see if either of the operands is a literal, then a constant. */
1499 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1500 || TREE_CODE (op0) == FIXED_CST)
1501 *litp = op0, op0 = 0;
1502 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1503 || TREE_CODE (op1) == FIXED_CST)
1504 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1506 if (op0 != 0 && TREE_CONSTANT (op0))
1507 *conp = op0, op0 = 0;
1508 else if (op1 != 0 && TREE_CONSTANT (op1))
1509 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1511 /* If we haven't dealt with either operand, this is not a case we can
1512 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1513 if (op0 != 0 && op1 != 0)
1514 var = in;
1515 else if (op0 != 0)
1516 var = op0;
1517 else
1518 var = op1, neg_var_p = neg1_p;
1520 /* Now do any needed negations. */
1521 if (neg_litp_p)
1522 *minus_litp = *litp, *litp = 0;
1523 if (neg_conp_p)
1524 *conp = negate_expr (*conp);
1525 if (neg_var_p)
1526 var = negate_expr (var);
1528 else if (TREE_CONSTANT (in))
1529 *conp = in;
1530 else
1531 var = in;
1533 if (negate_p)
1535 if (*litp)
1536 *minus_litp = *litp, *litp = 0;
1537 else if (*minus_litp)
1538 *litp = *minus_litp, *minus_litp = 0;
1539 *conp = negate_expr (*conp);
1540 var = negate_expr (var);
1543 return var;
1546 /* Re-associate trees split by the above function. T1 and T2 are either
1547 expressions to associate or null. Return the new expression, if any. If
1548 we build an operation, do it in TYPE and with CODE. */
1550 static tree
1551 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1553 if (t1 == 0)
1554 return t2;
1555 else if (t2 == 0)
1556 return t1;
1558 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1559 try to fold this since we will have infinite recursion. But do
1560 deal with any NEGATE_EXPRs. */
1561 if (TREE_CODE (t1) == code
1562 || TREE_CODE (t2) == code
1563 || MINUS_EXPR_P (t1) || MINUS_EXPR_P (t2))
1565 if (PLUS_EXPR_CODE_P (code))
1567 if (NEGATE_EXPR_P (t1))
1568 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1569 fold_convert (type, TREE_OPERAND (t1, 0)));
1570 else if (NEGATE_EXPR_P (t2))
1571 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1572 fold_convert (type, TREE_OPERAND (t2, 0)));
1573 else if (integer_zerop (t2))
1574 return fold_convert (type, t1);
1576 else if (MINUS_EXPR_CODE_P (code))
1578 if (integer_zerop (t2))
1579 return fold_convert (type, t1);
1582 return build2 (strip_nv (code), type, fold_convert (type, t1),
1583 fold_convert (type, t2));
1586 return fold_build2 (strip_nv (code), type, fold_convert (type, t1),
1587 fold_convert (type, t2));
1590 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1591 for use in int_const_binop, size_binop and size_diffop. */
1593 static bool
1594 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1596 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1597 return false;
1598 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1599 return false;
1601 switch (code)
1603 case LSHIFT_EXPR:
1604 case RSHIFT_EXPR:
1605 case LROTATE_EXPR:
1606 case RROTATE_EXPR:
1607 return true;
1609 default:
1610 break;
1613 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1614 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1615 && TYPE_MODE (type1) == TYPE_MODE (type2);
1619 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1620 to produce a new constant in *LOWP, *HIP. Return -1 if we don't know how
1621 to evaluate CODE at compile-time otherwise return 1 if the
1622 operation overflowed and 0 if not. */
1625 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
1626 unsigned HOST_WIDE_INT *lowp, HOST_WIDE_INT *hip)
1628 unsigned HOST_WIDE_INT int1l, int2l;
1629 HOST_WIDE_INT int1h, int2h;
1630 unsigned HOST_WIDE_INT low;
1631 HOST_WIDE_INT hi;
1632 unsigned HOST_WIDE_INT garbagel;
1633 HOST_WIDE_INT garbageh;
1634 tree type = TREE_TYPE (arg1);
1635 int uns = TYPE_UNSIGNED (type);
1636 int overflow = 0;
1638 int1l = TREE_INT_CST_LOW (arg1);
1639 int1h = TREE_INT_CST_HIGH (arg1);
1640 int2l = TREE_INT_CST_LOW (arg2);
1641 int2h = TREE_INT_CST_HIGH (arg2);
1643 switch (code)
1645 case BIT_IOR_EXPR:
1646 low = int1l | int2l, hi = int1h | int2h;
1647 break;
1649 case BIT_XOR_EXPR:
1650 low = int1l ^ int2l, hi = int1h ^ int2h;
1651 break;
1653 case BIT_AND_EXPR:
1654 low = int1l & int2l, hi = int1h & int2h;
1655 break;
1657 case RSHIFT_EXPR:
1658 int2l = -int2l;
1659 case LSHIFT_EXPR:
1660 /* It's unclear from the C standard whether shifts can overflow.
1661 The following code ignores overflow; perhaps a C standard
1662 interpretation ruling is needed. */
1663 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1664 &low, &hi, !uns);
1665 break;
1667 case RROTATE_EXPR:
1668 int2l = - int2l;
1669 case LROTATE_EXPR:
1670 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1671 &low, &hi);
1672 break;
1674 case PLUS_EXPR:
1675 case PLUSNV_EXPR:
1676 overflow = add_double_with_sign (int1l, int1h, int2l, int2h,
1677 &low, &hi, uns);
1678 break;
1680 case MINUS_EXPR:
1681 case MINUSNV_EXPR:
1682 neg_double (int2l, int2h, &low, &hi);
1683 add_double (int1l, int1h, low, hi, &low, &hi);
1684 if (uns)
1685 overflow = ((unsigned HOST_WIDE_INT) hi > (unsigned HOST_WIDE_INT) int1h
1686 || (hi == int1h
1687 && low > int1l));
1688 else
1689 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1690 break;
1692 case MULT_EXPR:
1693 case MULTNV_EXPR:
1694 overflow = mul_double_with_sign (int1l, int1h, int2l, int2h,
1695 &low, &hi, uns);
1696 break;
1698 case TRUNC_DIV_EXPR:
1699 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1700 case EXACT_DIV_EXPR:
1701 /* This is a shortcut for a common special case. */
1702 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1703 && !TREE_OVERFLOW (arg1)
1704 && !TREE_OVERFLOW (arg2)
1705 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1707 if (code == CEIL_DIV_EXPR)
1708 int1l += int2l - 1;
1710 low = int1l / int2l, hi = 0;
1711 break;
1714 /* ... fall through ... */
1716 case ROUND_DIV_EXPR:
1717 if (int2h == 0 && int2l == 0)
1718 return -1;
1719 if (int2h == 0 && int2l == 1)
1721 low = int1l, hi = int1h;
1722 break;
1724 if (int1l == int2l && int1h == int2h
1725 && ! (int1l == 0 && int1h == 0))
1727 low = 1, hi = 0;
1728 break;
1730 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1731 &low, &hi, &garbagel, &garbageh);
1732 break;
1734 case TRUNC_MOD_EXPR:
1735 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1736 /* This is a shortcut for a common special case. */
1737 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1738 && !TREE_OVERFLOW (arg1)
1739 && !TREE_OVERFLOW (arg2)
1740 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1742 if (code == CEIL_MOD_EXPR)
1743 int1l += int2l - 1;
1744 low = int1l % int2l, hi = 0;
1745 break;
1748 /* ... fall through ... */
1750 case ROUND_MOD_EXPR:
1751 if (int2h == 0 && int2l == 0)
1752 return -1;
1753 overflow = div_and_round_double (code, uns,
1754 int1l, int1h, int2l, int2h,
1755 &garbagel, &garbageh, &low, &hi);
1756 break;
1758 case MIN_EXPR:
1759 case MAX_EXPR:
1760 if (uns)
1761 low = (((unsigned HOST_WIDE_INT) int1h
1762 < (unsigned HOST_WIDE_INT) int2h)
1763 || (((unsigned HOST_WIDE_INT) int1h
1764 == (unsigned HOST_WIDE_INT) int2h)
1765 && int1l < int2l));
1766 else
1767 low = (int1h < int2h
1768 || (int1h == int2h && int1l < int2l));
1770 if (low == (code == MIN_EXPR))
1771 low = int1l, hi = int1h;
1772 else
1773 low = int2l, hi = int2h;
1774 break;
1776 default:
1777 return -1;
1780 *lowp = low;
1781 *hip = hi;
1783 return overflow;
1786 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1787 to produce a new constant. Return NULL_TREE if we don't know how
1788 to evaluate CODE at compile-time.
1790 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1792 tree
1793 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1794 int notrunc)
1796 unsigned HOST_WIDE_INT low;
1797 HOST_WIDE_INT hi;
1798 tree t;
1799 tree type = TREE_TYPE (arg1);
1800 int uns = TYPE_UNSIGNED (type);
1801 int is_sizetype
1802 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1803 int overflow;
1805 overflow = int_const_binop_1 (code, arg1, arg2, &low, &hi);
1806 if (overflow == -1)
1807 return NULL_TREE;
1809 if (notrunc)
1811 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1813 /* Propagate overflow flags ourselves. */
1814 if (((!uns || is_sizetype) && overflow)
1815 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1817 t = copy_node (t);
1818 TREE_OVERFLOW (t) = 1;
1821 else
1822 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1823 ((!uns || is_sizetype) && overflow)
1824 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1826 return t;
1829 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1830 constant. We assume ARG1 and ARG2 have the same data type, or at least
1831 are the same kind of constant and the same machine mode. Return zero if
1832 combining the constants is not allowed in the current operating mode.
1834 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1836 static tree
1837 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1839 /* Sanity check for the recursive cases. */
1840 if (!arg1 || !arg2)
1841 return NULL_TREE;
1843 STRIP_NOPS (arg1);
1844 STRIP_NOPS (arg2);
1846 if (TREE_CODE (arg1) == INTEGER_CST)
1847 return int_const_binop (code, arg1, arg2, notrunc);
1849 if (TREE_CODE (arg1) == REAL_CST)
1851 enum machine_mode mode;
1852 REAL_VALUE_TYPE d1;
1853 REAL_VALUE_TYPE d2;
1854 REAL_VALUE_TYPE value;
1855 REAL_VALUE_TYPE result;
1856 bool inexact;
1857 tree t, type;
1859 /* The following codes are handled by real_arithmetic. */
1860 switch (code)
1862 case PLUS_EXPR:
1863 case MINUS_EXPR:
1864 case MULT_EXPR:
1865 case RDIV_EXPR:
1866 case MIN_EXPR:
1867 case MAX_EXPR:
1868 break;
1870 default:
1871 return NULL_TREE;
1874 d1 = TREE_REAL_CST (arg1);
1875 d2 = TREE_REAL_CST (arg2);
1877 type = TREE_TYPE (arg1);
1878 mode = TYPE_MODE (type);
1880 /* Don't perform operation if we honor signaling NaNs and
1881 either operand is a NaN. */
1882 if (HONOR_SNANS (mode)
1883 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1884 return NULL_TREE;
1886 /* Don't perform operation if it would raise a division
1887 by zero exception. */
1888 if (code == RDIV_EXPR
1889 && REAL_VALUES_EQUAL (d2, dconst0)
1890 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1891 return NULL_TREE;
1893 /* If either operand is a NaN, just return it. Otherwise, set up
1894 for floating-point trap; we return an overflow. */
1895 if (REAL_VALUE_ISNAN (d1))
1896 return arg1;
1897 else if (REAL_VALUE_ISNAN (d2))
1898 return arg2;
1900 inexact = real_arithmetic (&value, code, &d1, &d2);
1901 real_convert (&result, mode, &value);
1903 /* Don't constant fold this floating point operation if
1904 the result has overflowed and flag_trapping_math. */
1905 if (flag_trapping_math
1906 && MODE_HAS_INFINITIES (mode)
1907 && REAL_VALUE_ISINF (result)
1908 && !REAL_VALUE_ISINF (d1)
1909 && !REAL_VALUE_ISINF (d2))
1910 return NULL_TREE;
1912 /* Don't constant fold this floating point operation if the
1913 result may dependent upon the run-time rounding mode and
1914 flag_rounding_math is set, or if GCC's software emulation
1915 is unable to accurately represent the result. */
1916 if ((flag_rounding_math
1917 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1918 && (inexact || !real_identical (&result, &value)))
1919 return NULL_TREE;
1921 t = build_real (type, result);
1923 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1924 return t;
1927 if (TREE_CODE (arg1) == FIXED_CST)
1929 FIXED_VALUE_TYPE f1;
1930 FIXED_VALUE_TYPE f2;
1931 FIXED_VALUE_TYPE result;
1932 tree t, type;
1933 int sat_p;
1934 bool overflow_p;
1936 /* The following codes are handled by fixed_arithmetic. */
1937 switch (code)
1939 case PLUS_EXPR:
1940 case MINUS_EXPR:
1941 case MULT_EXPR:
1942 case TRUNC_DIV_EXPR:
1943 f2 = TREE_FIXED_CST (arg2);
1944 break;
1946 case LSHIFT_EXPR:
1947 case RSHIFT_EXPR:
1948 f2.data.high = TREE_INT_CST_HIGH (arg2);
1949 f2.data.low = TREE_INT_CST_LOW (arg2);
1950 f2.mode = SImode;
1951 break;
1953 default:
1954 return NULL_TREE;
1957 f1 = TREE_FIXED_CST (arg1);
1958 type = TREE_TYPE (arg1);
1959 sat_p = TYPE_SATURATING (type);
1960 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1961 t = build_fixed (type, result);
1962 /* Propagate overflow flags. */
1963 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1964 TREE_OVERFLOW (t) = 1;
1965 return t;
1968 if (TREE_CODE (arg1) == COMPLEX_CST)
1970 tree type = TREE_TYPE (arg1);
1971 tree r1 = TREE_REALPART (arg1);
1972 tree i1 = TREE_IMAGPART (arg1);
1973 tree r2 = TREE_REALPART (arg2);
1974 tree i2 = TREE_IMAGPART (arg2);
1975 tree real, imag;
1977 switch (code)
1979 case PLUS_EXPR:
1980 case MINUS_EXPR:
1981 real = const_binop (code, r1, r2, notrunc);
1982 imag = const_binop (code, i1, i2, notrunc);
1983 break;
1985 case MULT_EXPR:
1986 real = const_binop (MINUS_EXPR,
1987 const_binop (MULT_EXPR, r1, r2, notrunc),
1988 const_binop (MULT_EXPR, i1, i2, notrunc),
1989 notrunc);
1990 imag = const_binop (PLUS_EXPR,
1991 const_binop (MULT_EXPR, r1, i2, notrunc),
1992 const_binop (MULT_EXPR, i1, r2, notrunc),
1993 notrunc);
1994 break;
1996 case RDIV_EXPR:
1998 tree magsquared
1999 = const_binop (PLUS_EXPR,
2000 const_binop (MULT_EXPR, r2, r2, notrunc),
2001 const_binop (MULT_EXPR, i2, i2, notrunc),
2002 notrunc);
2003 tree t1
2004 = const_binop (PLUS_EXPR,
2005 const_binop (MULT_EXPR, r1, r2, notrunc),
2006 const_binop (MULT_EXPR, i1, i2, notrunc),
2007 notrunc);
2008 tree t2
2009 = const_binop (MINUS_EXPR,
2010 const_binop (MULT_EXPR, i1, r2, notrunc),
2011 const_binop (MULT_EXPR, r1, i2, notrunc),
2012 notrunc);
2014 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
2015 code = TRUNC_DIV_EXPR;
2017 real = const_binop (code, t1, magsquared, notrunc);
2018 imag = const_binop (code, t2, magsquared, notrunc);
2020 break;
2022 default:
2023 return NULL_TREE;
2026 if (real && imag)
2027 return build_complex (type, real, imag);
2030 if (TREE_CODE (arg1) == VECTOR_CST)
2032 tree type = TREE_TYPE(arg1);
2033 int count = TYPE_VECTOR_SUBPARTS (type), i;
2034 tree elements1, elements2, list = NULL_TREE;
2036 if(TREE_CODE(arg2) != VECTOR_CST)
2037 return NULL_TREE;
2039 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2040 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2042 for (i = 0; i < count; i++)
2044 tree elem1, elem2, elem;
2046 /* The trailing elements can be empty and should be treated as 0 */
2047 if(!elements1)
2048 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2049 else
2051 elem1 = TREE_VALUE(elements1);
2052 elements1 = TREE_CHAIN (elements1);
2055 if(!elements2)
2056 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2057 else
2059 elem2 = TREE_VALUE(elements2);
2060 elements2 = TREE_CHAIN (elements2);
2063 elem = const_binop (code, elem1, elem2, notrunc);
2065 /* It is possible that const_binop cannot handle the given
2066 code and return NULL_TREE */
2067 if(elem == NULL_TREE)
2068 return NULL_TREE;
2070 list = tree_cons (NULL_TREE, elem, list);
2072 return build_vector(type, nreverse(list));
2074 return NULL_TREE;
2077 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2078 indicates which particular sizetype to create. */
2080 tree
2081 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2083 return build_int_cst (sizetype_tab[(int) kind], number);
2086 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2087 is a tree code. The type of the result is taken from the operands.
2088 Both must be equivalent integer types, ala int_binop_types_match_p.
2089 If the operands are constant, so is the result. */
2091 tree
2092 size_binop (enum tree_code code, tree arg0, tree arg1)
2094 tree type = TREE_TYPE (arg0);
2096 if (arg0 == error_mark_node || arg1 == error_mark_node)
2097 return error_mark_node;
2099 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2100 TREE_TYPE (arg1)));
2102 /* Handle the special case of two integer constants faster. */
2103 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2105 /* And some specific cases even faster than that. */
2106 if (code == PLUS_EXPR)
2108 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2109 return arg1;
2110 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2111 return arg0;
2113 else if (code == MINUS_EXPR)
2115 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2116 return arg0;
2118 else if (code == MULT_EXPR)
2120 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2121 return arg1;
2124 /* Handle general case of two integer constants. */
2125 return int_const_binop (code, arg0, arg1, 0);
2128 return fold_build2 (code, type, arg0, arg1);
2131 /* Given two values, either both of sizetype or both of bitsizetype,
2132 compute the difference between the two values. Return the value
2133 in signed type corresponding to the type of the operands. */
2135 tree
2136 size_diffop (tree arg0, tree arg1)
2138 tree type = TREE_TYPE (arg0);
2139 tree ctype;
2141 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2142 TREE_TYPE (arg1)));
2144 /* If the type is already signed, just do the simple thing. */
2145 if (!TYPE_UNSIGNED (type))
2146 return size_binop (MINUS_EXPR, arg0, arg1);
2148 if (type == sizetype)
2149 ctype = ssizetype;
2150 else if (type == bitsizetype)
2151 ctype = sbitsizetype;
2152 else
2153 ctype = signed_type_for (type);
2155 /* If either operand is not a constant, do the conversions to the signed
2156 type and subtract. The hardware will do the right thing with any
2157 overflow in the subtraction. */
2158 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2159 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2160 fold_convert (ctype, arg1));
2162 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2163 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2164 overflow) and negate (which can't either). Special-case a result
2165 of zero while we're here. */
2166 if (tree_int_cst_equal (arg0, arg1))
2167 return build_int_cst (ctype, 0);
2168 else if (tree_int_cst_lt (arg1, arg0))
2169 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2170 else
2171 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2172 fold_convert (ctype, size_binop (MINUS_EXPR,
2173 arg1, arg0)));
2176 /* A subroutine of fold_convert_const handling conversions of an
2177 INTEGER_CST to another integer type. */
2179 static tree
2180 fold_convert_const_int_from_int (tree type, const_tree arg1)
2182 tree t;
2184 /* Given an integer constant, make new constant with new type,
2185 appropriately sign-extended or truncated. */
2186 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2187 TREE_INT_CST_HIGH (arg1),
2188 /* Don't set the overflow when
2189 converting from a pointer, */
2190 !POINTER_TYPE_P (TREE_TYPE (arg1))
2191 /* or to a sizetype with same signedness
2192 and the precision is unchanged.
2193 ??? sizetype is always sign-extended,
2194 but its signedness depends on the
2195 frontend. Thus we see spurious overflows
2196 here if we do not check this. */
2197 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2198 == TYPE_PRECISION (type))
2199 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2200 == TYPE_UNSIGNED (type))
2201 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2202 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2203 || (TREE_CODE (type) == INTEGER_TYPE
2204 && TYPE_IS_SIZETYPE (type)))),
2205 (TREE_INT_CST_HIGH (arg1) < 0
2206 && (TYPE_UNSIGNED (type)
2207 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2208 | TREE_OVERFLOW (arg1));
2210 return t;
2213 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2214 to an integer type. */
2216 static tree
2217 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2219 int overflow = 0;
2220 tree t;
2222 /* The following code implements the floating point to integer
2223 conversion rules required by the Java Language Specification,
2224 that IEEE NaNs are mapped to zero and values that overflow
2225 the target precision saturate, i.e. values greater than
2226 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2227 are mapped to INT_MIN. These semantics are allowed by the
2228 C and C++ standards that simply state that the behavior of
2229 FP-to-integer conversion is unspecified upon overflow. */
2231 HOST_WIDE_INT high, low;
2232 REAL_VALUE_TYPE r;
2233 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2235 switch (code)
2237 case FIX_TRUNC_EXPR:
2238 real_trunc (&r, VOIDmode, &x);
2239 break;
2241 default:
2242 gcc_unreachable ();
2245 /* If R is NaN, return zero and show we have an overflow. */
2246 if (REAL_VALUE_ISNAN (r))
2248 overflow = 1;
2249 high = 0;
2250 low = 0;
2253 /* See if R is less than the lower bound or greater than the
2254 upper bound. */
2256 if (! overflow)
2258 tree lt = TYPE_MIN_VALUE (type);
2259 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2260 if (REAL_VALUES_LESS (r, l))
2262 overflow = 1;
2263 high = TREE_INT_CST_HIGH (lt);
2264 low = TREE_INT_CST_LOW (lt);
2268 if (! overflow)
2270 tree ut = TYPE_MAX_VALUE (type);
2271 if (ut)
2273 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2274 if (REAL_VALUES_LESS (u, r))
2276 overflow = 1;
2277 high = TREE_INT_CST_HIGH (ut);
2278 low = TREE_INT_CST_LOW (ut);
2283 if (! overflow)
2284 REAL_VALUE_TO_INT (&low, &high, r);
2286 t = force_fit_type_double (type, low, high, -1,
2287 overflow | TREE_OVERFLOW (arg1));
2288 return t;
2291 /* A subroutine of fold_convert_const handling conversions of a
2292 FIXED_CST to an integer type. */
2294 static tree
2295 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2297 tree t;
2298 double_int temp, temp_trunc;
2299 unsigned int mode;
2301 /* Right shift FIXED_CST to temp by fbit. */
2302 temp = TREE_FIXED_CST (arg1).data;
2303 mode = TREE_FIXED_CST (arg1).mode;
2304 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2306 lshift_double (temp.low, temp.high,
2307 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2308 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2310 /* Left shift temp to temp_trunc by fbit. */
2311 lshift_double (temp.low, temp.high,
2312 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2313 &temp_trunc.low, &temp_trunc.high,
2314 SIGNED_FIXED_POINT_MODE_P (mode));
2316 else
2318 temp.low = 0;
2319 temp.high = 0;
2320 temp_trunc.low = 0;
2321 temp_trunc.high = 0;
2324 /* If FIXED_CST is negative, we need to round the value toward 0.
2325 By checking if the fractional bits are not zero to add 1 to temp. */
2326 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2327 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2329 double_int one;
2330 one.low = 1;
2331 one.high = 0;
2332 temp = double_int_add (temp, one);
2335 /* Given a fixed-point constant, make new constant with new type,
2336 appropriately sign-extended or truncated. */
2337 t = force_fit_type_double (type, temp.low, temp.high, -1,
2338 (temp.high < 0
2339 && (TYPE_UNSIGNED (type)
2340 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2341 | TREE_OVERFLOW (arg1));
2343 return t;
2346 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2347 to another floating point type. */
2349 static tree
2350 fold_convert_const_real_from_real (tree type, const_tree arg1)
2352 REAL_VALUE_TYPE value;
2353 tree t;
2355 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2356 t = build_real (type, value);
2358 /* If converting an infinity or NAN to a representation that doesn't
2359 have one, set the overflow bit so that we can produce some kind of
2360 error message at the appropriate point if necessary. It's not the
2361 most user-friendly message, but it's better than nothing. */
2362 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2363 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2364 TREE_OVERFLOW (t) = 1;
2365 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2366 && !MODE_HAS_NANS (TYPE_MODE (type)))
2367 TREE_OVERFLOW (t) = 1;
2368 /* Regular overflow, conversion produced an infinity in a mode that
2369 can't represent them. */
2370 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2371 && REAL_VALUE_ISINF (value)
2372 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2373 TREE_OVERFLOW (t) = 1;
2374 else
2375 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2376 return t;
2379 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2380 to a floating point type. */
2382 static tree
2383 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2385 REAL_VALUE_TYPE value;
2386 tree t;
2388 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2389 t = build_real (type, value);
2391 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2392 return t;
2395 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2396 to another fixed-point type. */
2398 static tree
2399 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2401 FIXED_VALUE_TYPE value;
2402 tree t;
2403 bool overflow_p;
2405 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2406 TYPE_SATURATING (type));
2407 t = build_fixed (type, value);
2409 /* Propagate overflow flags. */
2410 if (overflow_p | TREE_OVERFLOW (arg1))
2411 TREE_OVERFLOW (t) = 1;
2412 return t;
2415 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2416 to a fixed-point type. */
2418 static tree
2419 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2421 FIXED_VALUE_TYPE value;
2422 tree t;
2423 bool overflow_p;
2425 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2426 TREE_INT_CST (arg1),
2427 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2428 TYPE_SATURATING (type));
2429 t = build_fixed (type, value);
2431 /* Propagate overflow flags. */
2432 if (overflow_p | TREE_OVERFLOW (arg1))
2433 TREE_OVERFLOW (t) = 1;
2434 return t;
2437 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2438 to a fixed-point type. */
2440 static tree
2441 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2443 FIXED_VALUE_TYPE value;
2444 tree t;
2445 bool overflow_p;
2447 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2448 &TREE_REAL_CST (arg1),
2449 TYPE_SATURATING (type));
2450 t = build_fixed (type, value);
2452 /* Propagate overflow flags. */
2453 if (overflow_p | TREE_OVERFLOW (arg1))
2454 TREE_OVERFLOW (t) = 1;
2455 return t;
2458 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2459 type TYPE. If no simplification can be done return NULL_TREE. */
2461 static tree
2462 fold_convert_const (enum tree_code code, tree type, tree arg1)
2464 if (TREE_TYPE (arg1) == type)
2465 return arg1;
2467 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2468 || TREE_CODE (type) == OFFSET_TYPE)
2470 if (TREE_CODE (arg1) == INTEGER_CST)
2471 return fold_convert_const_int_from_int (type, arg1);
2472 else if (TREE_CODE (arg1) == REAL_CST)
2473 return fold_convert_const_int_from_real (code, type, arg1);
2474 else if (TREE_CODE (arg1) == FIXED_CST)
2475 return fold_convert_const_int_from_fixed (type, arg1);
2477 else if (TREE_CODE (type) == REAL_TYPE)
2479 if (TREE_CODE (arg1) == INTEGER_CST)
2480 return build_real_from_int_cst (type, arg1);
2481 else if (TREE_CODE (arg1) == REAL_CST)
2482 return fold_convert_const_real_from_real (type, arg1);
2483 else if (TREE_CODE (arg1) == FIXED_CST)
2484 return fold_convert_const_real_from_fixed (type, arg1);
2486 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2488 if (TREE_CODE (arg1) == FIXED_CST)
2489 return fold_convert_const_fixed_from_fixed (type, arg1);
2490 else if (TREE_CODE (arg1) == INTEGER_CST)
2491 return fold_convert_const_fixed_from_int (type, arg1);
2492 else if (TREE_CODE (arg1) == REAL_CST)
2493 return fold_convert_const_fixed_from_real (type, arg1);
2495 return NULL_TREE;
2498 /* Construct a vector of zero elements of vector type TYPE. */
2500 static tree
2501 build_zero_vector (tree type)
2503 tree elem, list;
2504 int i, units;
2506 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2507 units = TYPE_VECTOR_SUBPARTS (type);
2509 list = NULL_TREE;
2510 for (i = 0; i < units; i++)
2511 list = tree_cons (NULL_TREE, elem, list);
2512 return build_vector (type, list);
2515 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2517 bool
2518 fold_convertible_p (const_tree type, const_tree arg)
2520 tree orig = TREE_TYPE (arg);
2522 if (type == orig)
2523 return true;
2525 if (TREE_CODE (arg) == ERROR_MARK
2526 || TREE_CODE (type) == ERROR_MARK
2527 || TREE_CODE (orig) == ERROR_MARK)
2528 return false;
2530 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2531 return true;
2533 switch (TREE_CODE (type))
2535 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2536 case POINTER_TYPE: case REFERENCE_TYPE:
2537 case OFFSET_TYPE:
2538 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2539 || TREE_CODE (orig) == OFFSET_TYPE)
2540 return true;
2541 return (TREE_CODE (orig) == VECTOR_TYPE
2542 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2544 case REAL_TYPE:
2545 case FIXED_POINT_TYPE:
2546 case COMPLEX_TYPE:
2547 case VECTOR_TYPE:
2548 case VOID_TYPE:
2549 return TREE_CODE (type) == TREE_CODE (orig);
2551 default:
2552 return false;
2556 /* Convert expression ARG to type TYPE. Used by the middle-end for
2557 simple conversions in preference to calling the front-end's convert. */
2559 tree
2560 fold_convert (tree type, tree arg)
2562 tree orig = TREE_TYPE (arg);
2563 tree tem;
2565 if (type == orig)
2566 return arg;
2568 if (TREE_CODE (arg) == ERROR_MARK
2569 || TREE_CODE (type) == ERROR_MARK
2570 || TREE_CODE (orig) == ERROR_MARK)
2571 return error_mark_node;
2573 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2574 return fold_build1 (NOP_EXPR, type, arg);
2576 switch (TREE_CODE (type))
2578 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2579 case POINTER_TYPE: case REFERENCE_TYPE:
2580 case OFFSET_TYPE:
2581 if (TREE_CODE (arg) == INTEGER_CST)
2583 tem = fold_convert_const (NOP_EXPR, type, arg);
2584 if (tem != NULL_TREE)
2585 return tem;
2587 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2588 || TREE_CODE (orig) == OFFSET_TYPE)
2589 return fold_build1 (NOP_EXPR, type, arg);
2590 if (TREE_CODE (orig) == COMPLEX_TYPE)
2592 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2593 return fold_convert (type, tem);
2595 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2596 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2597 return fold_build1 (NOP_EXPR, type, arg);
2599 case REAL_TYPE:
2600 if (TREE_CODE (arg) == INTEGER_CST)
2602 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2603 if (tem != NULL_TREE)
2604 return tem;
2606 else if (TREE_CODE (arg) == REAL_CST)
2608 tem = fold_convert_const (NOP_EXPR, type, arg);
2609 if (tem != NULL_TREE)
2610 return tem;
2612 else if (TREE_CODE (arg) == FIXED_CST)
2614 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2615 if (tem != NULL_TREE)
2616 return tem;
2619 switch (TREE_CODE (orig))
2621 case INTEGER_TYPE:
2622 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2623 case POINTER_TYPE: case REFERENCE_TYPE:
2624 return fold_build1 (FLOAT_EXPR, type, arg);
2626 case REAL_TYPE:
2627 return fold_build1 (NOP_EXPR, type, arg);
2629 case FIXED_POINT_TYPE:
2630 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2632 case COMPLEX_TYPE:
2633 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2634 return fold_convert (type, tem);
2636 default:
2637 gcc_unreachable ();
2640 case FIXED_POINT_TYPE:
2641 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2642 || TREE_CODE (arg) == REAL_CST)
2644 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2645 if (tem != NULL_TREE)
2646 return tem;
2649 switch (TREE_CODE (orig))
2651 case FIXED_POINT_TYPE:
2652 case INTEGER_TYPE:
2653 case ENUMERAL_TYPE:
2654 case BOOLEAN_TYPE:
2655 case REAL_TYPE:
2656 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2658 case COMPLEX_TYPE:
2659 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2660 return fold_convert (type, tem);
2662 default:
2663 gcc_unreachable ();
2666 case COMPLEX_TYPE:
2667 switch (TREE_CODE (orig))
2669 case INTEGER_TYPE:
2670 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2671 case POINTER_TYPE: case REFERENCE_TYPE:
2672 case REAL_TYPE:
2673 case FIXED_POINT_TYPE:
2674 return fold_build2 (COMPLEX_EXPR, type,
2675 fold_convert (TREE_TYPE (type), arg),
2676 fold_convert (TREE_TYPE (type),
2677 integer_zero_node));
2678 case COMPLEX_TYPE:
2680 tree rpart, ipart;
2682 if (TREE_CODE (arg) == COMPLEX_EXPR)
2684 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2685 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2686 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2689 arg = save_expr (arg);
2690 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2691 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2692 rpart = fold_convert (TREE_TYPE (type), rpart);
2693 ipart = fold_convert (TREE_TYPE (type), ipart);
2694 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2697 default:
2698 gcc_unreachable ();
2701 case VECTOR_TYPE:
2702 if (integer_zerop (arg))
2703 return build_zero_vector (type);
2704 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2705 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2706 || TREE_CODE (orig) == VECTOR_TYPE);
2707 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2709 case VOID_TYPE:
2710 tem = fold_ignored_result (arg);
2711 if (TREE_CODE (tem) == MODIFY_EXPR)
2712 return tem;
2713 return fold_build1 (NOP_EXPR, type, tem);
2715 default:
2716 gcc_unreachable ();
2720 /* Return false if expr can be assumed not to be an lvalue, true
2721 otherwise. */
2723 static bool
2724 maybe_lvalue_p (const_tree x)
2726 /* We only need to wrap lvalue tree codes. */
2727 switch (TREE_CODE (x))
2729 case VAR_DECL:
2730 case PARM_DECL:
2731 case RESULT_DECL:
2732 case LABEL_DECL:
2733 case FUNCTION_DECL:
2734 case SSA_NAME:
2736 case COMPONENT_REF:
2737 case INDIRECT_REF:
2738 case ALIGN_INDIRECT_REF:
2739 case MISALIGNED_INDIRECT_REF:
2740 case ARRAY_REF:
2741 case ARRAY_RANGE_REF:
2742 case BIT_FIELD_REF:
2743 case OBJ_TYPE_REF:
2745 case REALPART_EXPR:
2746 case IMAGPART_EXPR:
2747 case PREINCREMENT_EXPR:
2748 case PREDECREMENT_EXPR:
2749 case SAVE_EXPR:
2750 case TRY_CATCH_EXPR:
2751 case WITH_CLEANUP_EXPR:
2752 case COMPOUND_EXPR:
2753 case MODIFY_EXPR:
2754 case TARGET_EXPR:
2755 case COND_EXPR:
2756 case BIND_EXPR:
2757 case MIN_EXPR:
2758 case MAX_EXPR:
2759 break;
2761 default:
2762 /* Assume the worst for front-end tree codes. */
2763 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2764 break;
2765 return false;
2768 return true;
2771 /* Return an expr equal to X but certainly not valid as an lvalue. */
2773 tree
2774 non_lvalue (tree x)
2776 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2777 us. */
2778 if (in_gimple_form)
2779 return x;
2781 if (! maybe_lvalue_p (x))
2782 return x;
2783 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2786 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2787 Zero means allow extended lvalues. */
2789 int pedantic_lvalues;
2791 /* When pedantic, return an expr equal to X but certainly not valid as a
2792 pedantic lvalue. Otherwise, return X. */
2794 static tree
2795 pedantic_non_lvalue (tree x)
2797 if (pedantic_lvalues)
2798 return non_lvalue (x);
2799 else
2800 return x;
2803 /* Given a tree comparison code, return the code that is the logical inverse
2804 of the given code. It is not safe to do this for floating-point
2805 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2806 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2808 enum tree_code
2809 invert_tree_comparison (enum tree_code code, bool honor_nans)
2811 if (honor_nans && flag_trapping_math)
2812 return ERROR_MARK;
2814 switch (code)
2816 case EQ_EXPR:
2817 return NE_EXPR;
2818 case NE_EXPR:
2819 return EQ_EXPR;
2820 case GT_EXPR:
2821 return honor_nans ? UNLE_EXPR : LE_EXPR;
2822 case GE_EXPR:
2823 return honor_nans ? UNLT_EXPR : LT_EXPR;
2824 case LT_EXPR:
2825 return honor_nans ? UNGE_EXPR : GE_EXPR;
2826 case LE_EXPR:
2827 return honor_nans ? UNGT_EXPR : GT_EXPR;
2828 case LTGT_EXPR:
2829 return UNEQ_EXPR;
2830 case UNEQ_EXPR:
2831 return LTGT_EXPR;
2832 case UNGT_EXPR:
2833 return LE_EXPR;
2834 case UNGE_EXPR:
2835 return LT_EXPR;
2836 case UNLT_EXPR:
2837 return GE_EXPR;
2838 case UNLE_EXPR:
2839 return GT_EXPR;
2840 case ORDERED_EXPR:
2841 return UNORDERED_EXPR;
2842 case UNORDERED_EXPR:
2843 return ORDERED_EXPR;
2844 default:
2845 gcc_unreachable ();
2849 /* Similar, but return the comparison that results if the operands are
2850 swapped. This is safe for floating-point. */
2852 enum tree_code
2853 swap_tree_comparison (enum tree_code code)
2855 switch (code)
2857 case EQ_EXPR:
2858 case NE_EXPR:
2859 case ORDERED_EXPR:
2860 case UNORDERED_EXPR:
2861 case LTGT_EXPR:
2862 case UNEQ_EXPR:
2863 return code;
2864 case GT_EXPR:
2865 return LT_EXPR;
2866 case GE_EXPR:
2867 return LE_EXPR;
2868 case LT_EXPR:
2869 return GT_EXPR;
2870 case LE_EXPR:
2871 return GE_EXPR;
2872 case UNGT_EXPR:
2873 return UNLT_EXPR;
2874 case UNGE_EXPR:
2875 return UNLE_EXPR;
2876 case UNLT_EXPR:
2877 return UNGT_EXPR;
2878 case UNLE_EXPR:
2879 return UNGE_EXPR;
2880 default:
2881 gcc_unreachable ();
2886 /* Convert a comparison tree code from an enum tree_code representation
2887 into a compcode bit-based encoding. This function is the inverse of
2888 compcode_to_comparison. */
2890 static enum comparison_code
2891 comparison_to_compcode (enum tree_code code)
2893 switch (code)
2895 case LT_EXPR:
2896 return COMPCODE_LT;
2897 case EQ_EXPR:
2898 return COMPCODE_EQ;
2899 case LE_EXPR:
2900 return COMPCODE_LE;
2901 case GT_EXPR:
2902 return COMPCODE_GT;
2903 case NE_EXPR:
2904 return COMPCODE_NE;
2905 case GE_EXPR:
2906 return COMPCODE_GE;
2907 case ORDERED_EXPR:
2908 return COMPCODE_ORD;
2909 case UNORDERED_EXPR:
2910 return COMPCODE_UNORD;
2911 case UNLT_EXPR:
2912 return COMPCODE_UNLT;
2913 case UNEQ_EXPR:
2914 return COMPCODE_UNEQ;
2915 case UNLE_EXPR:
2916 return COMPCODE_UNLE;
2917 case UNGT_EXPR:
2918 return COMPCODE_UNGT;
2919 case LTGT_EXPR:
2920 return COMPCODE_LTGT;
2921 case UNGE_EXPR:
2922 return COMPCODE_UNGE;
2923 default:
2924 gcc_unreachable ();
2928 /* Convert a compcode bit-based encoding of a comparison operator back
2929 to GCC's enum tree_code representation. This function is the
2930 inverse of comparison_to_compcode. */
2932 static enum tree_code
2933 compcode_to_comparison (enum comparison_code code)
2935 switch (code)
2937 case COMPCODE_LT:
2938 return LT_EXPR;
2939 case COMPCODE_EQ:
2940 return EQ_EXPR;
2941 case COMPCODE_LE:
2942 return LE_EXPR;
2943 case COMPCODE_GT:
2944 return GT_EXPR;
2945 case COMPCODE_NE:
2946 return NE_EXPR;
2947 case COMPCODE_GE:
2948 return GE_EXPR;
2949 case COMPCODE_ORD:
2950 return ORDERED_EXPR;
2951 case COMPCODE_UNORD:
2952 return UNORDERED_EXPR;
2953 case COMPCODE_UNLT:
2954 return UNLT_EXPR;
2955 case COMPCODE_UNEQ:
2956 return UNEQ_EXPR;
2957 case COMPCODE_UNLE:
2958 return UNLE_EXPR;
2959 case COMPCODE_UNGT:
2960 return UNGT_EXPR;
2961 case COMPCODE_LTGT:
2962 return LTGT_EXPR;
2963 case COMPCODE_UNGE:
2964 return UNGE_EXPR;
2965 default:
2966 gcc_unreachable ();
2970 /* Return a tree for the comparison which is the combination of
2971 doing the AND or OR (depending on CODE) of the two operations LCODE
2972 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2973 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2974 if this makes the transformation invalid. */
2976 tree
2977 combine_comparisons (enum tree_code code, enum tree_code lcode,
2978 enum tree_code rcode, tree truth_type,
2979 tree ll_arg, tree lr_arg)
2981 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2982 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2983 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2984 int compcode;
2986 switch (code)
2988 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2989 compcode = lcompcode & rcompcode;
2990 break;
2992 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2993 compcode = lcompcode | rcompcode;
2994 break;
2996 default:
2997 return NULL_TREE;
3000 if (!honor_nans)
3002 /* Eliminate unordered comparisons, as well as LTGT and ORD
3003 which are not used unless the mode has NaNs. */
3004 compcode &= ~COMPCODE_UNORD;
3005 if (compcode == COMPCODE_LTGT)
3006 compcode = COMPCODE_NE;
3007 else if (compcode == COMPCODE_ORD)
3008 compcode = COMPCODE_TRUE;
3010 else if (flag_trapping_math)
3012 /* Check that the original operation and the optimized ones will trap
3013 under the same condition. */
3014 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3015 && (lcompcode != COMPCODE_EQ)
3016 && (lcompcode != COMPCODE_ORD);
3017 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3018 && (rcompcode != COMPCODE_EQ)
3019 && (rcompcode != COMPCODE_ORD);
3020 bool trap = (compcode & COMPCODE_UNORD) == 0
3021 && (compcode != COMPCODE_EQ)
3022 && (compcode != COMPCODE_ORD);
3024 /* In a short-circuited boolean expression the LHS might be
3025 such that the RHS, if evaluated, will never trap. For
3026 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3027 if neither x nor y is NaN. (This is a mixed blessing: for
3028 example, the expression above will never trap, hence
3029 optimizing it to x < y would be invalid). */
3030 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3031 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3032 rtrap = false;
3034 /* If the comparison was short-circuited, and only the RHS
3035 trapped, we may now generate a spurious trap. */
3036 if (rtrap && !ltrap
3037 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3038 return NULL_TREE;
3040 /* If we changed the conditions that cause a trap, we lose. */
3041 if ((ltrap || rtrap) != trap)
3042 return NULL_TREE;
3045 if (compcode == COMPCODE_TRUE)
3046 return constant_boolean_node (true, truth_type);
3047 else if (compcode == COMPCODE_FALSE)
3048 return constant_boolean_node (false, truth_type);
3049 else
3051 enum tree_code tcode;
3053 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3054 return fold_build2 (tcode, truth_type, ll_arg, lr_arg);
3058 /* Return nonzero if two operands (typically of the same tree node)
3059 are necessarily equal. If either argument has side-effects this
3060 function returns zero. FLAGS modifies behavior as follows:
3062 If OEP_ONLY_CONST is set, only return nonzero for constants.
3063 This function tests whether the operands are indistinguishable;
3064 it does not test whether they are equal using C's == operation.
3065 The distinction is important for IEEE floating point, because
3066 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3067 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3069 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3070 even though it may hold multiple values during a function.
3071 This is because a GCC tree node guarantees that nothing else is
3072 executed between the evaluation of its "operands" (which may often
3073 be evaluated in arbitrary order). Hence if the operands themselves
3074 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3075 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3076 unset means assuming isochronic (or instantaneous) tree equivalence.
3077 Unless comparing arbitrary expression trees, such as from different
3078 statements, this flag can usually be left unset.
3080 If OEP_PURE_SAME is set, then pure functions with identical arguments
3081 are considered the same. It is used when the caller has other ways
3082 to ensure that global memory is unchanged in between. */
3085 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3087 /* If either is ERROR_MARK, they aren't equal. */
3088 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3089 return 0;
3091 /* Check equality of integer constants before bailing out due to
3092 precision differences. */
3093 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3094 return tree_int_cst_equal (arg0, arg1);
3096 /* If both types don't have the same signedness, then we can't consider
3097 them equal. We must check this before the STRIP_NOPS calls
3098 because they may change the signedness of the arguments. As pointers
3099 strictly don't have a signedness, require either two pointers or
3100 two non-pointers as well. */
3101 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3102 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3103 return 0;
3105 /* If both types don't have the same precision, then it is not safe
3106 to strip NOPs. */
3107 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3108 return 0;
3110 STRIP_NOPS (arg0);
3111 STRIP_NOPS (arg1);
3113 /* In case both args are comparisons but with different comparison
3114 code, try to swap the comparison operands of one arg to produce
3115 a match and compare that variant. */
3116 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3117 && COMPARISON_CLASS_P (arg0)
3118 && COMPARISON_CLASS_P (arg1))
3120 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3122 if (TREE_CODE (arg0) == swap_code)
3123 return operand_equal_p (TREE_OPERAND (arg0, 0),
3124 TREE_OPERAND (arg1, 1), flags)
3125 && operand_equal_p (TREE_OPERAND (arg0, 1),
3126 TREE_OPERAND (arg1, 0), flags);
3129 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3130 /* This is needed for conversions and for COMPONENT_REF.
3131 Might as well play it safe and always test this. */
3132 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3133 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3134 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3135 return 0;
3137 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3138 We don't care about side effects in that case because the SAVE_EXPR
3139 takes care of that for us. In all other cases, two expressions are
3140 equal if they have no side effects. If we have two identical
3141 expressions with side effects that should be treated the same due
3142 to the only side effects being identical SAVE_EXPR's, that will
3143 be detected in the recursive calls below. */
3144 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3145 && (TREE_CODE (arg0) == SAVE_EXPR
3146 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3147 return 1;
3149 /* Next handle constant cases, those for which we can return 1 even
3150 if ONLY_CONST is set. */
3151 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3152 switch (TREE_CODE (arg0))
3154 case INTEGER_CST:
3155 return tree_int_cst_equal (arg0, arg1);
3157 case FIXED_CST:
3158 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3159 TREE_FIXED_CST (arg1));
3161 case REAL_CST:
3162 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3163 TREE_REAL_CST (arg1)))
3164 return 1;
3167 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3169 /* If we do not distinguish between signed and unsigned zero,
3170 consider them equal. */
3171 if (real_zerop (arg0) && real_zerop (arg1))
3172 return 1;
3174 return 0;
3176 case VECTOR_CST:
3178 tree v1, v2;
3180 v1 = TREE_VECTOR_CST_ELTS (arg0);
3181 v2 = TREE_VECTOR_CST_ELTS (arg1);
3182 while (v1 && v2)
3184 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3185 flags))
3186 return 0;
3187 v1 = TREE_CHAIN (v1);
3188 v2 = TREE_CHAIN (v2);
3191 return v1 == v2;
3194 case COMPLEX_CST:
3195 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3196 flags)
3197 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3198 flags));
3200 case STRING_CST:
3201 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3202 && ! memcmp (TREE_STRING_POINTER (arg0),
3203 TREE_STRING_POINTER (arg1),
3204 TREE_STRING_LENGTH (arg0)));
3206 case ADDR_EXPR:
3207 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3209 default:
3210 break;
3213 if (flags & OEP_ONLY_CONST)
3214 return 0;
3216 /* Define macros to test an operand from arg0 and arg1 for equality and a
3217 variant that allows null and views null as being different from any
3218 non-null value. In the latter case, if either is null, the both
3219 must be; otherwise, do the normal comparison. */
3220 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3221 TREE_OPERAND (arg1, N), flags)
3223 #define OP_SAME_WITH_NULL(N) \
3224 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3225 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3227 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3229 case tcc_unary:
3230 /* Two conversions are equal only if signedness and modes match. */
3231 switch (TREE_CODE (arg0))
3233 CASE_CONVERT:
3234 case FIX_TRUNC_EXPR:
3235 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3236 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3237 return 0;
3238 break;
3239 default:
3240 break;
3243 return OP_SAME (0);
3246 case tcc_comparison:
3247 case tcc_binary:
3248 if (OP_SAME (0) && OP_SAME (1))
3249 return 1;
3251 /* For commutative ops, allow the other order. */
3252 return (commutative_tree_code (TREE_CODE (arg0))
3253 && operand_equal_p (TREE_OPERAND (arg0, 0),
3254 TREE_OPERAND (arg1, 1), flags)
3255 && operand_equal_p (TREE_OPERAND (arg0, 1),
3256 TREE_OPERAND (arg1, 0), flags));
3258 case tcc_reference:
3259 /* If either of the pointer (or reference) expressions we are
3260 dereferencing contain a side effect, these cannot be equal. */
3261 if (TREE_SIDE_EFFECTS (arg0)
3262 || TREE_SIDE_EFFECTS (arg1))
3263 return 0;
3265 switch (TREE_CODE (arg0))
3267 case INDIRECT_REF:
3268 case ALIGN_INDIRECT_REF:
3269 case MISALIGNED_INDIRECT_REF:
3270 case REALPART_EXPR:
3271 case IMAGPART_EXPR:
3272 return OP_SAME (0);
3274 case ARRAY_REF:
3275 case ARRAY_RANGE_REF:
3276 /* Operands 2 and 3 may be null.
3277 Compare the array index by value if it is constant first as we
3278 may have different types but same value here. */
3279 return (OP_SAME (0)
3280 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3281 TREE_OPERAND (arg1, 1))
3282 || OP_SAME (1))
3283 && OP_SAME_WITH_NULL (2)
3284 && OP_SAME_WITH_NULL (3));
3286 case COMPONENT_REF:
3287 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3288 may be NULL when we're called to compare MEM_EXPRs. */
3289 return OP_SAME_WITH_NULL (0)
3290 && OP_SAME (1)
3291 && OP_SAME_WITH_NULL (2);
3293 case BIT_FIELD_REF:
3294 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3296 default:
3297 return 0;
3300 case tcc_expression:
3301 switch (TREE_CODE (arg0))
3303 case ADDR_EXPR:
3304 case TRUTH_NOT_EXPR:
3305 return OP_SAME (0);
3307 case TRUTH_ANDIF_EXPR:
3308 case TRUTH_ORIF_EXPR:
3309 return OP_SAME (0) && OP_SAME (1);
3311 case TRUTH_AND_EXPR:
3312 case TRUTH_OR_EXPR:
3313 case TRUTH_XOR_EXPR:
3314 if (OP_SAME (0) && OP_SAME (1))
3315 return 1;
3317 /* Otherwise take into account this is a commutative operation. */
3318 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3319 TREE_OPERAND (arg1, 1), flags)
3320 && operand_equal_p (TREE_OPERAND (arg0, 1),
3321 TREE_OPERAND (arg1, 0), flags));
3323 case COND_EXPR:
3324 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3326 default:
3327 return 0;
3330 case tcc_vl_exp:
3331 switch (TREE_CODE (arg0))
3333 case CALL_EXPR:
3334 /* If the CALL_EXPRs call different functions, then they
3335 clearly can not be equal. */
3336 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3337 flags))
3338 return 0;
3341 unsigned int cef = call_expr_flags (arg0);
3342 if (flags & OEP_PURE_SAME)
3343 cef &= ECF_CONST | ECF_PURE;
3344 else
3345 cef &= ECF_CONST;
3346 if (!cef)
3347 return 0;
3350 /* Now see if all the arguments are the same. */
3352 const_call_expr_arg_iterator iter0, iter1;
3353 const_tree a0, a1;
3354 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3355 a1 = first_const_call_expr_arg (arg1, &iter1);
3356 a0 && a1;
3357 a0 = next_const_call_expr_arg (&iter0),
3358 a1 = next_const_call_expr_arg (&iter1))
3359 if (! operand_equal_p (a0, a1, flags))
3360 return 0;
3362 /* If we get here and both argument lists are exhausted
3363 then the CALL_EXPRs are equal. */
3364 return ! (a0 || a1);
3366 default:
3367 return 0;
3370 case tcc_declaration:
3371 /* Consider __builtin_sqrt equal to sqrt. */
3372 return (TREE_CODE (arg0) == FUNCTION_DECL
3373 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3374 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3375 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3377 default:
3378 return 0;
3381 #undef OP_SAME
3382 #undef OP_SAME_WITH_NULL
3385 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3386 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3388 When in doubt, return 0. */
3390 static int
3391 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3393 int unsignedp1, unsignedpo;
3394 tree primarg0, primarg1, primother;
3395 unsigned int correct_width;
3397 if (operand_equal_p (arg0, arg1, 0))
3398 return 1;
3400 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3401 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3402 return 0;
3404 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3405 and see if the inner values are the same. This removes any
3406 signedness comparison, which doesn't matter here. */
3407 primarg0 = arg0, primarg1 = arg1;
3408 STRIP_NOPS (primarg0);
3409 STRIP_NOPS (primarg1);
3410 if (operand_equal_p (primarg0, primarg1, 0))
3411 return 1;
3413 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3414 actual comparison operand, ARG0.
3416 First throw away any conversions to wider types
3417 already present in the operands. */
3419 primarg1 = get_narrower (arg1, &unsignedp1);
3420 primother = get_narrower (other, &unsignedpo);
3422 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3423 if (unsignedp1 == unsignedpo
3424 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3425 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3427 tree type = TREE_TYPE (arg0);
3429 /* Make sure shorter operand is extended the right way
3430 to match the longer operand. */
3431 primarg1 = fold_convert (signed_or_unsigned_type_for
3432 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3434 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3435 return 1;
3438 return 0;
3441 /* See if ARG is an expression that is either a comparison or is performing
3442 arithmetic on comparisons. The comparisons must only be comparing
3443 two different values, which will be stored in *CVAL1 and *CVAL2; if
3444 they are nonzero it means that some operands have already been found.
3445 No variables may be used anywhere else in the expression except in the
3446 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3447 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3449 If this is true, return 1. Otherwise, return zero. */
3451 static int
3452 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3454 enum tree_code code = TREE_CODE (arg);
3455 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3457 /* We can handle some of the tcc_expression cases here. */
3458 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3459 tclass = tcc_unary;
3460 else if (tclass == tcc_expression
3461 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3462 || code == COMPOUND_EXPR))
3463 tclass = tcc_binary;
3465 else if (tclass == tcc_expression && code == SAVE_EXPR
3466 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3468 /* If we've already found a CVAL1 or CVAL2, this expression is
3469 two complex to handle. */
3470 if (*cval1 || *cval2)
3471 return 0;
3473 tclass = tcc_unary;
3474 *save_p = 1;
3477 switch (tclass)
3479 case tcc_unary:
3480 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3482 case tcc_binary:
3483 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3484 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3485 cval1, cval2, save_p));
3487 case tcc_constant:
3488 return 1;
3490 case tcc_expression:
3491 if (code == COND_EXPR)
3492 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3493 cval1, cval2, save_p)
3494 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3495 cval1, cval2, save_p)
3496 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3497 cval1, cval2, save_p));
3498 return 0;
3500 case tcc_comparison:
3501 /* First see if we can handle the first operand, then the second. For
3502 the second operand, we know *CVAL1 can't be zero. It must be that
3503 one side of the comparison is each of the values; test for the
3504 case where this isn't true by failing if the two operands
3505 are the same. */
3507 if (operand_equal_p (TREE_OPERAND (arg, 0),
3508 TREE_OPERAND (arg, 1), 0))
3509 return 0;
3511 if (*cval1 == 0)
3512 *cval1 = TREE_OPERAND (arg, 0);
3513 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3515 else if (*cval2 == 0)
3516 *cval2 = TREE_OPERAND (arg, 0);
3517 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3519 else
3520 return 0;
3522 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3524 else if (*cval2 == 0)
3525 *cval2 = TREE_OPERAND (arg, 1);
3526 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3528 else
3529 return 0;
3531 return 1;
3533 default:
3534 return 0;
3538 /* ARG is a tree that is known to contain just arithmetic operations and
3539 comparisons. Evaluate the operations in the tree substituting NEW0 for
3540 any occurrence of OLD0 as an operand of a comparison and likewise for
3541 NEW1 and OLD1. */
3543 static tree
3544 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3546 tree type = TREE_TYPE (arg);
3547 enum tree_code code = TREE_CODE (arg);
3548 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3550 /* We can handle some of the tcc_expression cases here. */
3551 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3552 tclass = tcc_unary;
3553 else if (tclass == tcc_expression
3554 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3555 tclass = tcc_binary;
3557 switch (tclass)
3559 case tcc_unary:
3560 return fold_build1 (code, type,
3561 eval_subst (TREE_OPERAND (arg, 0),
3562 old0, new0, old1, new1));
3564 case tcc_binary:
3565 return fold_build2 (code, type,
3566 eval_subst (TREE_OPERAND (arg, 0),
3567 old0, new0, old1, new1),
3568 eval_subst (TREE_OPERAND (arg, 1),
3569 old0, new0, old1, new1));
3571 case tcc_expression:
3572 switch (code)
3574 case SAVE_EXPR:
3575 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3577 case COMPOUND_EXPR:
3578 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3580 case COND_EXPR:
3581 return fold_build3 (code, type,
3582 eval_subst (TREE_OPERAND (arg, 0),
3583 old0, new0, old1, new1),
3584 eval_subst (TREE_OPERAND (arg, 1),
3585 old0, new0, old1, new1),
3586 eval_subst (TREE_OPERAND (arg, 2),
3587 old0, new0, old1, new1));
3588 default:
3589 break;
3591 /* Fall through - ??? */
3593 case tcc_comparison:
3595 tree arg0 = TREE_OPERAND (arg, 0);
3596 tree arg1 = TREE_OPERAND (arg, 1);
3598 /* We need to check both for exact equality and tree equality. The
3599 former will be true if the operand has a side-effect. In that
3600 case, we know the operand occurred exactly once. */
3602 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3603 arg0 = new0;
3604 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3605 arg0 = new1;
3607 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3608 arg1 = new0;
3609 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3610 arg1 = new1;
3612 return fold_build2 (code, type, arg0, arg1);
3615 default:
3616 return arg;
3620 /* Return a tree for the case when the result of an expression is RESULT
3621 converted to TYPE and OMITTED was previously an operand of the expression
3622 but is now not needed (e.g., we folded OMITTED * 0).
3624 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3625 the conversion of RESULT to TYPE. */
3627 tree
3628 omit_one_operand (tree type, tree result, tree omitted)
3630 tree t = fold_convert (type, result);
3632 /* If the resulting operand is an empty statement, just return the omitted
3633 statement casted to void. */
3634 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3635 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3637 if (TREE_SIDE_EFFECTS (omitted))
3638 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3640 return non_lvalue (t);
3643 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3645 static tree
3646 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3648 tree t = fold_convert (type, result);
3650 /* If the resulting operand is an empty statement, just return the omitted
3651 statement casted to void. */
3652 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3653 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3655 if (TREE_SIDE_EFFECTS (omitted))
3656 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3658 return pedantic_non_lvalue (t);
3661 /* Return a tree for the case when the result of an expression is RESULT
3662 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3663 of the expression but are now not needed.
3665 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3666 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3667 evaluated before OMITTED2. Otherwise, if neither has side effects,
3668 just do the conversion of RESULT to TYPE. */
3670 tree
3671 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3673 tree t = fold_convert (type, result);
3675 if (TREE_SIDE_EFFECTS (omitted2))
3676 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3677 if (TREE_SIDE_EFFECTS (omitted1))
3678 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3680 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3684 /* Return a simplified tree node for the truth-negation of ARG. This
3685 never alters ARG itself. We assume that ARG is an operation that
3686 returns a truth value (0 or 1).
3688 FIXME: one would think we would fold the result, but it causes
3689 problems with the dominator optimizer. */
3691 tree
3692 fold_truth_not_expr (tree arg)
3694 tree t, type = TREE_TYPE (arg);
3695 enum tree_code code = TREE_CODE (arg);
3697 /* If this is a comparison, we can simply invert it, except for
3698 floating-point non-equality comparisons, in which case we just
3699 enclose a TRUTH_NOT_EXPR around what we have. */
3701 if (TREE_CODE_CLASS (code) == tcc_comparison)
3703 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3704 if (FLOAT_TYPE_P (op_type)
3705 && flag_trapping_math
3706 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3707 && code != NE_EXPR && code != EQ_EXPR)
3708 return NULL_TREE;
3710 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3711 if (code == ERROR_MARK)
3712 return NULL_TREE;
3714 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3715 if (EXPR_HAS_LOCATION (arg))
3716 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3717 return t;
3720 switch (code)
3722 case INTEGER_CST:
3723 return constant_boolean_node (integer_zerop (arg), type);
3725 case TRUTH_AND_EXPR:
3726 t = build2 (TRUTH_OR_EXPR, type,
3727 invert_truthvalue (TREE_OPERAND (arg, 0)),
3728 invert_truthvalue (TREE_OPERAND (arg, 1)));
3729 break;
3731 case TRUTH_OR_EXPR:
3732 t = build2 (TRUTH_AND_EXPR, type,
3733 invert_truthvalue (TREE_OPERAND (arg, 0)),
3734 invert_truthvalue (TREE_OPERAND (arg, 1)));
3735 break;
3737 case TRUTH_XOR_EXPR:
3738 /* Here we can invert either operand. We invert the first operand
3739 unless the second operand is a TRUTH_NOT_EXPR in which case our
3740 result is the XOR of the first operand with the inside of the
3741 negation of the second operand. */
3743 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3744 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3745 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3746 else
3747 t = build2 (TRUTH_XOR_EXPR, type,
3748 invert_truthvalue (TREE_OPERAND (arg, 0)),
3749 TREE_OPERAND (arg, 1));
3750 break;
3752 case TRUTH_ANDIF_EXPR:
3753 t = build2 (TRUTH_ORIF_EXPR, type,
3754 invert_truthvalue (TREE_OPERAND (arg, 0)),
3755 invert_truthvalue (TREE_OPERAND (arg, 1)));
3756 break;
3758 case TRUTH_ORIF_EXPR:
3759 t = build2 (TRUTH_ANDIF_EXPR, type,
3760 invert_truthvalue (TREE_OPERAND (arg, 0)),
3761 invert_truthvalue (TREE_OPERAND (arg, 1)));
3762 break;
3764 case TRUTH_NOT_EXPR:
3765 return TREE_OPERAND (arg, 0);
3767 case COND_EXPR:
3769 tree arg1 = TREE_OPERAND (arg, 1);
3770 tree arg2 = TREE_OPERAND (arg, 2);
3771 /* A COND_EXPR may have a throw as one operand, which
3772 then has void type. Just leave void operands
3773 as they are. */
3774 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3775 VOID_TYPE_P (TREE_TYPE (arg1))
3776 ? arg1 : invert_truthvalue (arg1),
3777 VOID_TYPE_P (TREE_TYPE (arg2))
3778 ? arg2 : invert_truthvalue (arg2));
3779 break;
3782 case COMPOUND_EXPR:
3783 t = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3784 invert_truthvalue (TREE_OPERAND (arg, 1)));
3785 break;
3787 case NON_LVALUE_EXPR:
3788 return invert_truthvalue (TREE_OPERAND (arg, 0));
3790 CASE_CONVERT:
3791 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3793 t = build1 (TRUTH_NOT_EXPR, type, arg);
3794 break;
3797 /* ... fall through ... */
3799 case FLOAT_EXPR:
3800 t = build1 (TREE_CODE (arg), type,
3801 invert_truthvalue (TREE_OPERAND (arg, 0)));
3802 break;
3804 case BIT_AND_EXPR:
3805 if (!integer_onep (TREE_OPERAND (arg, 1)))
3806 return NULL_TREE;
3807 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3808 break;
3810 case SAVE_EXPR:
3811 t = build1 (TRUTH_NOT_EXPR, type, arg);
3812 break;
3814 case CLEANUP_POINT_EXPR:
3815 t = build1 (CLEANUP_POINT_EXPR, type,
3816 invert_truthvalue (TREE_OPERAND (arg, 0)));
3817 break;
3819 default:
3820 t = NULL_TREE;
3821 break;
3824 if (t && EXPR_HAS_LOCATION (arg))
3825 SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
3827 return t;
3830 /* Return a simplified tree node for the truth-negation of ARG. This
3831 never alters ARG itself. We assume that ARG is an operation that
3832 returns a truth value (0 or 1).
3834 FIXME: one would think we would fold the result, but it causes
3835 problems with the dominator optimizer. */
3837 tree
3838 invert_truthvalue (tree arg)
3840 tree tem;
3842 if (TREE_CODE (arg) == ERROR_MARK)
3843 return arg;
3845 tem = fold_truth_not_expr (arg);
3846 if (!tem)
3847 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3849 return tem;
3852 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3853 operands are another bit-wise operation with a common input. If so,
3854 distribute the bit operations to save an operation and possibly two if
3855 constants are involved. For example, convert
3856 (A | B) & (A | C) into A | (B & C)
3857 Further simplification will occur if B and C are constants.
3859 If this optimization cannot be done, 0 will be returned. */
3861 static tree
3862 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3864 tree common;
3865 tree left, right;
3867 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3868 || TREE_CODE (arg0) == code
3869 || (TREE_CODE (arg0) != BIT_AND_EXPR
3870 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3871 return 0;
3873 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3875 common = TREE_OPERAND (arg0, 0);
3876 left = TREE_OPERAND (arg0, 1);
3877 right = TREE_OPERAND (arg1, 1);
3879 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3881 common = TREE_OPERAND (arg0, 0);
3882 left = TREE_OPERAND (arg0, 1);
3883 right = TREE_OPERAND (arg1, 0);
3885 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3887 common = TREE_OPERAND (arg0, 1);
3888 left = TREE_OPERAND (arg0, 0);
3889 right = TREE_OPERAND (arg1, 1);
3891 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3893 common = TREE_OPERAND (arg0, 1);
3894 left = TREE_OPERAND (arg0, 0);
3895 right = TREE_OPERAND (arg1, 0);
3897 else
3898 return 0;
3900 common = fold_convert (type, common);
3901 left = fold_convert (type, left);
3902 right = fold_convert (type, right);
3903 return fold_build2 (TREE_CODE (arg0), type, common,
3904 fold_build2 (code, type, left, right));
3907 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3908 with code CODE. This optimization is unsafe. */
3909 static tree
3910 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3912 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3913 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3915 /* (A / C) +- (B / C) -> (A +- B) / C. */
3916 if (mul0 == mul1
3917 && operand_equal_p (TREE_OPERAND (arg0, 1),
3918 TREE_OPERAND (arg1, 1), 0))
3919 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3920 fold_build2 (code, type,
3921 TREE_OPERAND (arg0, 0),
3922 TREE_OPERAND (arg1, 0)),
3923 TREE_OPERAND (arg0, 1));
3925 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3926 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3927 TREE_OPERAND (arg1, 0), 0)
3928 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3929 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3931 REAL_VALUE_TYPE r0, r1;
3932 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3933 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3934 if (!mul0)
3935 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3936 if (!mul1)
3937 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3938 real_arithmetic (&r0, code, &r0, &r1);
3939 return fold_build2 (MULT_EXPR, type,
3940 TREE_OPERAND (arg0, 0),
3941 build_real (type, r0));
3944 return NULL_TREE;
3947 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3948 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3950 static tree
3951 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3952 HOST_WIDE_INT bitpos, int unsignedp)
3954 tree result, bftype;
3956 if (bitpos == 0)
3958 tree size = TYPE_SIZE (TREE_TYPE (inner));
3959 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3960 || POINTER_TYPE_P (TREE_TYPE (inner)))
3961 && host_integerp (size, 0)
3962 && tree_low_cst (size, 0) == bitsize)
3963 return fold_convert (type, inner);
3966 bftype = type;
3967 if (TYPE_PRECISION (bftype) != bitsize
3968 || TYPE_UNSIGNED (bftype) == !unsignedp)
3969 bftype = build_nonstandard_integer_type (bitsize, 0);
3971 result = build3 (BIT_FIELD_REF, bftype, inner,
3972 size_int (bitsize), bitsize_int (bitpos));
3974 if (bftype != type)
3975 result = fold_convert (type, result);
3977 return result;
3980 /* Optimize a bit-field compare.
3982 There are two cases: First is a compare against a constant and the
3983 second is a comparison of two items where the fields are at the same
3984 bit position relative to the start of a chunk (byte, halfword, word)
3985 large enough to contain it. In these cases we can avoid the shift
3986 implicit in bitfield extractions.
3988 For constants, we emit a compare of the shifted constant with the
3989 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3990 compared. For two fields at the same position, we do the ANDs with the
3991 similar mask and compare the result of the ANDs.
3993 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3994 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3995 are the left and right operands of the comparison, respectively.
3997 If the optimization described above can be done, we return the resulting
3998 tree. Otherwise we return zero. */
4000 static tree
4001 optimize_bit_field_compare (enum tree_code code, tree compare_type,
4002 tree lhs, tree rhs)
4004 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4005 tree type = TREE_TYPE (lhs);
4006 tree signed_type, unsigned_type;
4007 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4008 enum machine_mode lmode, rmode, nmode;
4009 int lunsignedp, runsignedp;
4010 int lvolatilep = 0, rvolatilep = 0;
4011 tree linner, rinner = NULL_TREE;
4012 tree mask;
4013 tree offset;
4015 /* Get all the information about the extractions being done. If the bit size
4016 if the same as the size of the underlying object, we aren't doing an
4017 extraction at all and so can do nothing. We also don't want to
4018 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4019 then will no longer be able to replace it. */
4020 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4021 &lunsignedp, &lvolatilep, false);
4022 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4023 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
4024 return 0;
4026 if (!const_p)
4028 /* If this is not a constant, we can only do something if bit positions,
4029 sizes, and signedness are the same. */
4030 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4031 &runsignedp, &rvolatilep, false);
4033 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4034 || lunsignedp != runsignedp || offset != 0
4035 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4036 return 0;
4039 /* See if we can find a mode to refer to this field. We should be able to,
4040 but fail if we can't. */
4041 nmode = get_best_mode (lbitsize, lbitpos,
4042 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4043 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4044 TYPE_ALIGN (TREE_TYPE (rinner))),
4045 word_mode, lvolatilep || rvolatilep);
4046 if (nmode == VOIDmode)
4047 return 0;
4049 /* Set signed and unsigned types of the precision of this mode for the
4050 shifts below. */
4051 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4052 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4054 /* Compute the bit position and size for the new reference and our offset
4055 within it. If the new reference is the same size as the original, we
4056 won't optimize anything, so return zero. */
4057 nbitsize = GET_MODE_BITSIZE (nmode);
4058 nbitpos = lbitpos & ~ (nbitsize - 1);
4059 lbitpos -= nbitpos;
4060 if (nbitsize == lbitsize)
4061 return 0;
4063 if (BYTES_BIG_ENDIAN)
4064 lbitpos = nbitsize - lbitsize - lbitpos;
4066 /* Make the mask to be used against the extracted field. */
4067 mask = build_int_cst_type (unsigned_type, -1);
4068 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4069 mask = const_binop (RSHIFT_EXPR, mask,
4070 size_int (nbitsize - lbitsize - lbitpos), 0);
4072 if (! const_p)
4073 /* If not comparing with constant, just rework the comparison
4074 and return. */
4075 return fold_build2 (code, compare_type,
4076 fold_build2 (BIT_AND_EXPR, unsigned_type,
4077 make_bit_field_ref (linner,
4078 unsigned_type,
4079 nbitsize, nbitpos,
4081 mask),
4082 fold_build2 (BIT_AND_EXPR, unsigned_type,
4083 make_bit_field_ref (rinner,
4084 unsigned_type,
4085 nbitsize, nbitpos,
4087 mask));
4089 /* Otherwise, we are handling the constant case. See if the constant is too
4090 big for the field. Warn and return a tree of for 0 (false) if so. We do
4091 this not only for its own sake, but to avoid having to test for this
4092 error case below. If we didn't, we might generate wrong code.
4094 For unsigned fields, the constant shifted right by the field length should
4095 be all zero. For signed fields, the high-order bits should agree with
4096 the sign bit. */
4098 if (lunsignedp)
4100 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4101 fold_convert (unsigned_type, rhs),
4102 size_int (lbitsize), 0)))
4104 warning (0, "comparison is always %d due to width of bit-field",
4105 code == NE_EXPR);
4106 return constant_boolean_node (code == NE_EXPR, compare_type);
4109 else
4111 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4112 size_int (lbitsize - 1), 0);
4113 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4115 warning (0, "comparison is always %d due to width of bit-field",
4116 code == NE_EXPR);
4117 return constant_boolean_node (code == NE_EXPR, compare_type);
4121 /* Single-bit compares should always be against zero. */
4122 if (lbitsize == 1 && ! integer_zerop (rhs))
4124 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4125 rhs = build_int_cst (type, 0);
4128 /* Make a new bitfield reference, shift the constant over the
4129 appropriate number of bits and mask it with the computed mask
4130 (in case this was a signed field). If we changed it, make a new one. */
4131 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4132 if (lvolatilep)
4134 TREE_SIDE_EFFECTS (lhs) = 1;
4135 TREE_THIS_VOLATILE (lhs) = 1;
4138 rhs = const_binop (BIT_AND_EXPR,
4139 const_binop (LSHIFT_EXPR,
4140 fold_convert (unsigned_type, rhs),
4141 size_int (lbitpos), 0),
4142 mask, 0);
4144 return build2 (code, compare_type,
4145 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4146 rhs);
4149 /* Subroutine for fold_truthop: decode a field reference.
4151 If EXP is a comparison reference, we return the innermost reference.
4153 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4154 set to the starting bit number.
4156 If the innermost field can be completely contained in a mode-sized
4157 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4159 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4160 otherwise it is not changed.
4162 *PUNSIGNEDP is set to the signedness of the field.
4164 *PMASK is set to the mask used. This is either contained in a
4165 BIT_AND_EXPR or derived from the width of the field.
4167 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4169 Return 0 if this is not a component reference or is one that we can't
4170 do anything with. */
4172 static tree
4173 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4174 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4175 int *punsignedp, int *pvolatilep,
4176 tree *pmask, tree *pand_mask)
4178 tree outer_type = 0;
4179 tree and_mask = 0;
4180 tree mask, inner, offset;
4181 tree unsigned_type;
4182 unsigned int precision;
4184 /* All the optimizations using this function assume integer fields.
4185 There are problems with FP fields since the type_for_size call
4186 below can fail for, e.g., XFmode. */
4187 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4188 return 0;
4190 /* We are interested in the bare arrangement of bits, so strip everything
4191 that doesn't affect the machine mode. However, record the type of the
4192 outermost expression if it may matter below. */
4193 if (CONVERT_EXPR_P (exp)
4194 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4195 outer_type = TREE_TYPE (exp);
4196 STRIP_NOPS (exp);
4198 if (TREE_CODE (exp) == BIT_AND_EXPR)
4200 and_mask = TREE_OPERAND (exp, 1);
4201 exp = TREE_OPERAND (exp, 0);
4202 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4203 if (TREE_CODE (and_mask) != INTEGER_CST)
4204 return 0;
4207 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4208 punsignedp, pvolatilep, false);
4209 if ((inner == exp && and_mask == 0)
4210 || *pbitsize < 0 || offset != 0
4211 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4212 return 0;
4214 /* If the number of bits in the reference is the same as the bitsize of
4215 the outer type, then the outer type gives the signedness. Otherwise
4216 (in case of a small bitfield) the signedness is unchanged. */
4217 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4218 *punsignedp = TYPE_UNSIGNED (outer_type);
4220 /* Compute the mask to access the bitfield. */
4221 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4222 precision = TYPE_PRECISION (unsigned_type);
4224 mask = build_int_cst_type (unsigned_type, -1);
4226 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4227 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4229 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4230 if (and_mask != 0)
4231 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4232 fold_convert (unsigned_type, and_mask), mask);
4234 *pmask = mask;
4235 *pand_mask = and_mask;
4236 return inner;
4239 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4240 bit positions. */
4242 static int
4243 all_ones_mask_p (const_tree mask, int size)
4245 tree type = TREE_TYPE (mask);
4246 unsigned int precision = TYPE_PRECISION (type);
4247 tree tmask;
4249 tmask = build_int_cst_type (signed_type_for (type), -1);
4251 return
4252 tree_int_cst_equal (mask,
4253 const_binop (RSHIFT_EXPR,
4254 const_binop (LSHIFT_EXPR, tmask,
4255 size_int (precision - size),
4257 size_int (precision - size), 0));
4260 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4261 represents the sign bit of EXP's type. If EXP represents a sign
4262 or zero extension, also test VAL against the unextended type.
4263 The return value is the (sub)expression whose sign bit is VAL,
4264 or NULL_TREE otherwise. */
4266 static tree
4267 sign_bit_p (tree exp, const_tree val)
4269 unsigned HOST_WIDE_INT mask_lo, lo;
4270 HOST_WIDE_INT mask_hi, hi;
4271 int width;
4272 tree t;
4274 /* Tree EXP must have an integral type. */
4275 t = TREE_TYPE (exp);
4276 if (! INTEGRAL_TYPE_P (t))
4277 return NULL_TREE;
4279 /* Tree VAL must be an integer constant. */
4280 if (TREE_CODE (val) != INTEGER_CST
4281 || TREE_OVERFLOW (val))
4282 return NULL_TREE;
4284 width = TYPE_PRECISION (t);
4285 if (width > HOST_BITS_PER_WIDE_INT)
4287 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4288 lo = 0;
4290 mask_hi = ((unsigned HOST_WIDE_INT) -1
4291 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4292 mask_lo = -1;
4294 else
4296 hi = 0;
4297 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4299 mask_hi = 0;
4300 mask_lo = ((unsigned HOST_WIDE_INT) -1
4301 >> (HOST_BITS_PER_WIDE_INT - width));
4304 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4305 treat VAL as if it were unsigned. */
4306 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4307 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4308 return exp;
4310 /* Handle extension from a narrower type. */
4311 if (TREE_CODE (exp) == NOP_EXPR
4312 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4313 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4315 return NULL_TREE;
4318 /* Subroutine for fold_truthop: determine if an operand is simple enough
4319 to be evaluated unconditionally. */
4321 static int
4322 simple_operand_p (const_tree exp)
4324 /* Strip any conversions that don't change the machine mode. */
4325 STRIP_NOPS (exp);
4327 return (CONSTANT_CLASS_P (exp)
4328 || TREE_CODE (exp) == SSA_NAME
4329 || (DECL_P (exp)
4330 && ! TREE_ADDRESSABLE (exp)
4331 && ! TREE_THIS_VOLATILE (exp)
4332 && ! DECL_NONLOCAL (exp)
4333 /* Don't regard global variables as simple. They may be
4334 allocated in ways unknown to the compiler (shared memory,
4335 #pragma weak, etc). */
4336 && ! TREE_PUBLIC (exp)
4337 && ! DECL_EXTERNAL (exp)
4338 /* Loading a static variable is unduly expensive, but global
4339 registers aren't expensive. */
4340 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4343 /* The following functions are subroutines to fold_range_test and allow it to
4344 try to change a logical combination of comparisons into a range test.
4346 For example, both
4347 X == 2 || X == 3 || X == 4 || X == 5
4349 X >= 2 && X <= 5
4350 are converted to
4351 (unsigned) (X - 2) <= 3
4353 We describe each set of comparisons as being either inside or outside
4354 a range, using a variable named like IN_P, and then describe the
4355 range with a lower and upper bound. If one of the bounds is omitted,
4356 it represents either the highest or lowest value of the type.
4358 In the comments below, we represent a range by two numbers in brackets
4359 preceded by a "+" to designate being inside that range, or a "-" to
4360 designate being outside that range, so the condition can be inverted by
4361 flipping the prefix. An omitted bound is represented by a "-". For
4362 example, "- [-, 10]" means being outside the range starting at the lowest
4363 possible value and ending at 10, in other words, being greater than 10.
4364 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4365 always false.
4367 We set up things so that the missing bounds are handled in a consistent
4368 manner so neither a missing bound nor "true" and "false" need to be
4369 handled using a special case. */
4371 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4372 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4373 and UPPER1_P are nonzero if the respective argument is an upper bound
4374 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4375 must be specified for a comparison. ARG1 will be converted to ARG0's
4376 type if both are specified. */
4378 static tree
4379 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4380 tree arg1, int upper1_p)
4382 tree tem;
4383 int result;
4384 int sgn0, sgn1;
4386 /* If neither arg represents infinity, do the normal operation.
4387 Else, if not a comparison, return infinity. Else handle the special
4388 comparison rules. Note that most of the cases below won't occur, but
4389 are handled for consistency. */
4391 if (arg0 != 0 && arg1 != 0)
4393 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4394 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4395 STRIP_NOPS (tem);
4396 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4399 if (TREE_CODE_CLASS (code) != tcc_comparison)
4400 return 0;
4402 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4403 for neither. In real maths, we cannot assume open ended ranges are
4404 the same. But, this is computer arithmetic, where numbers are finite.
4405 We can therefore make the transformation of any unbounded range with
4406 the value Z, Z being greater than any representable number. This permits
4407 us to treat unbounded ranges as equal. */
4408 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4409 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4410 switch (code)
4412 case EQ_EXPR:
4413 result = sgn0 == sgn1;
4414 break;
4415 case NE_EXPR:
4416 result = sgn0 != sgn1;
4417 break;
4418 case LT_EXPR:
4419 result = sgn0 < sgn1;
4420 break;
4421 case LE_EXPR:
4422 result = sgn0 <= sgn1;
4423 break;
4424 case GT_EXPR:
4425 result = sgn0 > sgn1;
4426 break;
4427 case GE_EXPR:
4428 result = sgn0 >= sgn1;
4429 break;
4430 default:
4431 gcc_unreachable ();
4434 return constant_boolean_node (result, type);
4437 /* Given EXP, a logical expression, set the range it is testing into
4438 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4439 actually being tested. *PLOW and *PHIGH will be made of the same
4440 type as the returned expression. If EXP is not a comparison, we
4441 will most likely not be returning a useful value and range. Set
4442 *STRICT_OVERFLOW_P to true if the return value is only valid
4443 because signed overflow is undefined; otherwise, do not change
4444 *STRICT_OVERFLOW_P. */
4446 tree
4447 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4448 bool *strict_overflow_p)
4450 enum tree_code code;
4451 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4452 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4453 int in_p, n_in_p;
4454 tree low, high, n_low, n_high;
4456 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4457 and see if we can refine the range. Some of the cases below may not
4458 happen, but it doesn't seem worth worrying about this. We "continue"
4459 the outer loop when we've changed something; otherwise we "break"
4460 the switch, which will "break" the while. */
4462 in_p = 0;
4463 low = high = build_int_cst (TREE_TYPE (exp), 0);
4465 while (1)
4467 code = TREE_CODE (exp);
4468 exp_type = TREE_TYPE (exp);
4470 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4472 if (TREE_OPERAND_LENGTH (exp) > 0)
4473 arg0 = TREE_OPERAND (exp, 0);
4474 if (TREE_CODE_CLASS (code) == tcc_comparison
4475 || TREE_CODE_CLASS (code) == tcc_unary
4476 || TREE_CODE_CLASS (code) == tcc_binary)
4477 arg0_type = TREE_TYPE (arg0);
4478 if (TREE_CODE_CLASS (code) == tcc_binary
4479 || TREE_CODE_CLASS (code) == tcc_comparison
4480 || (TREE_CODE_CLASS (code) == tcc_expression
4481 && TREE_OPERAND_LENGTH (exp) > 1))
4482 arg1 = TREE_OPERAND (exp, 1);
4485 switch (code)
4487 case TRUTH_NOT_EXPR:
4488 in_p = ! in_p, exp = arg0;
4489 continue;
4491 case EQ_EXPR: case NE_EXPR:
4492 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4493 /* We can only do something if the range is testing for zero
4494 and if the second operand is an integer constant. Note that
4495 saying something is "in" the range we make is done by
4496 complementing IN_P since it will set in the initial case of
4497 being not equal to zero; "out" is leaving it alone. */
4498 if (low == 0 || high == 0
4499 || ! integer_zerop (low) || ! integer_zerop (high)
4500 || TREE_CODE (arg1) != INTEGER_CST)
4501 break;
4503 switch (code)
4505 case NE_EXPR: /* - [c, c] */
4506 low = high = arg1;
4507 break;
4508 case EQ_EXPR: /* + [c, c] */
4509 in_p = ! in_p, low = high = arg1;
4510 break;
4511 case GT_EXPR: /* - [-, c] */
4512 low = 0, high = arg1;
4513 break;
4514 case GE_EXPR: /* + [c, -] */
4515 in_p = ! in_p, low = arg1, high = 0;
4516 break;
4517 case LT_EXPR: /* - [c, -] */
4518 low = arg1, high = 0;
4519 break;
4520 case LE_EXPR: /* + [-, c] */
4521 in_p = ! in_p, low = 0, high = arg1;
4522 break;
4523 default:
4524 gcc_unreachable ();
4527 /* If this is an unsigned comparison, we also know that EXP is
4528 greater than or equal to zero. We base the range tests we make
4529 on that fact, so we record it here so we can parse existing
4530 range tests. We test arg0_type since often the return type
4531 of, e.g. EQ_EXPR, is boolean. */
4532 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4534 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4535 in_p, low, high, 1,
4536 build_int_cst (arg0_type, 0),
4537 NULL_TREE))
4538 break;
4540 in_p = n_in_p, low = n_low, high = n_high;
4542 /* If the high bound is missing, but we have a nonzero low
4543 bound, reverse the range so it goes from zero to the low bound
4544 minus 1. */
4545 if (high == 0 && low && ! integer_zerop (low))
4547 in_p = ! in_p;
4548 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4549 integer_one_node, 0);
4550 low = build_int_cst (arg0_type, 0);
4554 exp = arg0;
4555 continue;
4557 case NEGATE_EXPR:
4558 /* (-x) IN [a,b] -> x in [-b, -a] */
4559 n_low = range_binop (MINUS_EXPR, exp_type,
4560 build_int_cst (exp_type, 0),
4561 0, high, 1);
4562 n_high = range_binop (MINUS_EXPR, exp_type,
4563 build_int_cst (exp_type, 0),
4564 0, low, 0);
4565 low = n_low, high = n_high;
4566 exp = arg0;
4567 continue;
4569 case BIT_NOT_EXPR:
4570 /* ~ X -> -X - 1 */
4571 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4572 build_int_cst (exp_type, 1));
4573 continue;
4575 case PLUS_EXPR: case MINUS_EXPR:
4576 if (TREE_CODE (arg1) != INTEGER_CST)
4577 break;
4579 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4580 move a constant to the other side. */
4581 if (!TYPE_UNSIGNED (arg0_type)
4582 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4583 break;
4585 /* If EXP is signed, any overflow in the computation is undefined,
4586 so we don't worry about it so long as our computations on
4587 the bounds don't overflow. For unsigned, overflow is defined
4588 and this is exactly the right thing. */
4589 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4590 arg0_type, low, 0, arg1, 0);
4591 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4592 arg0_type, high, 1, arg1, 0);
4593 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4594 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4595 break;
4597 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4598 *strict_overflow_p = true;
4600 /* Check for an unsigned range which has wrapped around the maximum
4601 value thus making n_high < n_low, and normalize it. */
4602 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4604 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4605 integer_one_node, 0);
4606 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4607 integer_one_node, 0);
4609 /* If the range is of the form +/- [ x+1, x ], we won't
4610 be able to normalize it. But then, it represents the
4611 whole range or the empty set, so make it
4612 +/- [ -, - ]. */
4613 if (tree_int_cst_equal (n_low, low)
4614 && tree_int_cst_equal (n_high, high))
4615 low = high = 0;
4616 else
4617 in_p = ! in_p;
4619 else
4620 low = n_low, high = n_high;
4622 exp = arg0;
4623 continue;
4625 CASE_CONVERT: case NON_LVALUE_EXPR:
4626 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4627 break;
4629 if (! INTEGRAL_TYPE_P (arg0_type)
4630 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4631 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4632 break;
4634 n_low = low, n_high = high;
4636 if (n_low != 0)
4637 n_low = fold_convert (arg0_type, n_low);
4639 if (n_high != 0)
4640 n_high = fold_convert (arg0_type, n_high);
4643 /* If we're converting arg0 from an unsigned type, to exp,
4644 a signed type, we will be doing the comparison as unsigned.
4645 The tests above have already verified that LOW and HIGH
4646 are both positive.
4648 So we have to ensure that we will handle large unsigned
4649 values the same way that the current signed bounds treat
4650 negative values. */
4652 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4654 tree high_positive;
4655 tree equiv_type;
4656 /* For fixed-point modes, we need to pass the saturating flag
4657 as the 2nd parameter. */
4658 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4659 equiv_type = lang_hooks.types.type_for_mode
4660 (TYPE_MODE (arg0_type),
4661 TYPE_SATURATING (arg0_type));
4662 else
4663 equiv_type = lang_hooks.types.type_for_mode
4664 (TYPE_MODE (arg0_type), 1);
4666 /* A range without an upper bound is, naturally, unbounded.
4667 Since convert would have cropped a very large value, use
4668 the max value for the destination type. */
4669 high_positive
4670 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4671 : TYPE_MAX_VALUE (arg0_type);
4673 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4674 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4675 fold_convert (arg0_type,
4676 high_positive),
4677 build_int_cst (arg0_type, 1));
4679 /* If the low bound is specified, "and" the range with the
4680 range for which the original unsigned value will be
4681 positive. */
4682 if (low != 0)
4684 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4685 1, n_low, n_high, 1,
4686 fold_convert (arg0_type,
4687 integer_zero_node),
4688 high_positive))
4689 break;
4691 in_p = (n_in_p == in_p);
4693 else
4695 /* Otherwise, "or" the range with the range of the input
4696 that will be interpreted as negative. */
4697 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4698 0, n_low, n_high, 1,
4699 fold_convert (arg0_type,
4700 integer_zero_node),
4701 high_positive))
4702 break;
4704 in_p = (in_p != n_in_p);
4708 exp = arg0;
4709 low = n_low, high = n_high;
4710 continue;
4712 default:
4713 break;
4716 break;
4719 /* If EXP is a constant, we can evaluate whether this is true or false. */
4720 if (TREE_CODE (exp) == INTEGER_CST)
4722 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4723 exp, 0, low, 0))
4724 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4725 exp, 1, high, 1)));
4726 low = high = 0;
4727 exp = 0;
4730 *pin_p = in_p, *plow = low, *phigh = high;
4731 return exp;
4734 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4735 type, TYPE, return an expression to test if EXP is in (or out of, depending
4736 on IN_P) the range. Return 0 if the test couldn't be created. */
4738 tree
4739 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4741 tree etype = TREE_TYPE (exp), value;
4743 #ifdef HAVE_canonicalize_funcptr_for_compare
4744 /* Disable this optimization for function pointer expressions
4745 on targets that require function pointer canonicalization. */
4746 if (HAVE_canonicalize_funcptr_for_compare
4747 && TREE_CODE (etype) == POINTER_TYPE
4748 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4749 return NULL_TREE;
4750 #endif
4752 if (! in_p)
4754 value = build_range_check (type, exp, 1, low, high);
4755 if (value != 0)
4756 return invert_truthvalue (value);
4758 return 0;
4761 if (low == 0 && high == 0)
4762 return build_int_cst (type, 1);
4764 if (low == 0)
4765 return fold_build2 (LE_EXPR, type, exp,
4766 fold_convert (etype, high));
4768 if (high == 0)
4769 return fold_build2 (GE_EXPR, type, exp,
4770 fold_convert (etype, low));
4772 if (operand_equal_p (low, high, 0))
4773 return fold_build2 (EQ_EXPR, type, exp,
4774 fold_convert (etype, low));
4776 if (integer_zerop (low))
4778 if (! TYPE_UNSIGNED (etype))
4780 etype = unsigned_type_for (etype);
4781 high = fold_convert (etype, high);
4782 exp = fold_convert (etype, exp);
4784 return build_range_check (type, exp, 1, 0, high);
4787 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4788 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4790 unsigned HOST_WIDE_INT lo;
4791 HOST_WIDE_INT hi;
4792 int prec;
4794 prec = TYPE_PRECISION (etype);
4795 if (prec <= HOST_BITS_PER_WIDE_INT)
4797 hi = 0;
4798 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4800 else
4802 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4803 lo = (unsigned HOST_WIDE_INT) -1;
4806 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4808 if (TYPE_UNSIGNED (etype))
4810 tree signed_etype = signed_type_for (etype);
4811 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4812 etype
4813 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4814 else
4815 etype = signed_etype;
4816 exp = fold_convert (etype, exp);
4818 return fold_build2 (GT_EXPR, type, exp,
4819 build_int_cst (etype, 0));
4823 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4824 This requires wrap-around arithmetics for the type of the expression.
4825 First make sure that arithmetics in this type is valid, then make sure
4826 that it wraps around. */
4827 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4828 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4829 TYPE_UNSIGNED (etype));
4831 if (TREE_CODE (etype) == INTEGER_TYPE
4832 && strip_nv (TREE_CODE (exp)) != TREE_CODE (exp))
4834 tree utype, minv, maxv;
4836 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4837 for the type in question, as we rely on this here. */
4838 utype = unsigned_type_for (etype);
4839 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4840 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4841 integer_one_node, 1);
4842 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4844 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4845 minv, 1, maxv, 1)))
4846 etype = utype;
4847 else
4848 return 0;
4851 high = fold_convert (etype, high);
4852 low = fold_convert (etype, low);
4853 exp = fold_convert (etype, exp);
4855 value = const_binop (MINUS_EXPR, high, low, 0);
4858 if (POINTER_TYPE_P (etype))
4860 if (value != 0 && !TREE_OVERFLOW (value))
4862 low = fold_convert (sizetype, low);
4863 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4864 return build_range_check (type,
4865 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4866 1, build_int_cst (etype, 0), value);
4868 return 0;
4871 if (value != 0 && !TREE_OVERFLOW (value))
4872 return build_range_check (type,
4873 fold_build2 (MINUS_EXPR, etype, exp, low),
4874 1, build_int_cst (etype, 0), value);
4876 return 0;
4879 /* Return the predecessor of VAL in its type, handling the infinite case. */
4881 static tree
4882 range_predecessor (tree val)
4884 tree type = TREE_TYPE (val);
4886 if (INTEGRAL_TYPE_P (type)
4887 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4888 return 0;
4889 else
4890 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4893 /* Return the successor of VAL in its type, handling the infinite case. */
4895 static tree
4896 range_successor (tree val)
4898 tree type = TREE_TYPE (val);
4900 if (INTEGRAL_TYPE_P (type)
4901 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4902 return 0;
4903 else
4904 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4907 /* Given two ranges, see if we can merge them into one. Return 1 if we
4908 can, 0 if we can't. Set the output range into the specified parameters. */
4910 bool
4911 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4912 tree high0, int in1_p, tree low1, tree high1)
4914 int no_overlap;
4915 int subset;
4916 int temp;
4917 tree tem;
4918 int in_p;
4919 tree low, high;
4920 int lowequal = ((low0 == 0 && low1 == 0)
4921 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4922 low0, 0, low1, 0)));
4923 int highequal = ((high0 == 0 && high1 == 0)
4924 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4925 high0, 1, high1, 1)));
4927 /* Make range 0 be the range that starts first, or ends last if they
4928 start at the same value. Swap them if it isn't. */
4929 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4930 low0, 0, low1, 0))
4931 || (lowequal
4932 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4933 high1, 1, high0, 1))))
4935 temp = in0_p, in0_p = in1_p, in1_p = temp;
4936 tem = low0, low0 = low1, low1 = tem;
4937 tem = high0, high0 = high1, high1 = tem;
4940 /* Now flag two cases, whether the ranges are disjoint or whether the
4941 second range is totally subsumed in the first. Note that the tests
4942 below are simplified by the ones above. */
4943 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4944 high0, 1, low1, 0));
4945 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4946 high1, 1, high0, 1));
4948 /* We now have four cases, depending on whether we are including or
4949 excluding the two ranges. */
4950 if (in0_p && in1_p)
4952 /* If they don't overlap, the result is false. If the second range
4953 is a subset it is the result. Otherwise, the range is from the start
4954 of the second to the end of the first. */
4955 if (no_overlap)
4956 in_p = 0, low = high = 0;
4957 else if (subset)
4958 in_p = 1, low = low1, high = high1;
4959 else
4960 in_p = 1, low = low1, high = high0;
4963 else if (in0_p && ! in1_p)
4965 /* If they don't overlap, the result is the first range. If they are
4966 equal, the result is false. If the second range is a subset of the
4967 first, and the ranges begin at the same place, we go from just after
4968 the end of the second range to the end of the first. If the second
4969 range is not a subset of the first, or if it is a subset and both
4970 ranges end at the same place, the range starts at the start of the
4971 first range and ends just before the second range.
4972 Otherwise, we can't describe this as a single range. */
4973 if (no_overlap)
4974 in_p = 1, low = low0, high = high0;
4975 else if (lowequal && highequal)
4976 in_p = 0, low = high = 0;
4977 else if (subset && lowequal)
4979 low = range_successor (high1);
4980 high = high0;
4981 in_p = 1;
4982 if (low == 0)
4984 /* We are in the weird situation where high0 > high1 but
4985 high1 has no successor. Punt. */
4986 return 0;
4989 else if (! subset || highequal)
4991 low = low0;
4992 high = range_predecessor (low1);
4993 in_p = 1;
4994 if (high == 0)
4996 /* low0 < low1 but low1 has no predecessor. Punt. */
4997 return 0;
5000 else
5001 return 0;
5004 else if (! in0_p && in1_p)
5006 /* If they don't overlap, the result is the second range. If the second
5007 is a subset of the first, the result is false. Otherwise,
5008 the range starts just after the first range and ends at the
5009 end of the second. */
5010 if (no_overlap)
5011 in_p = 1, low = low1, high = high1;
5012 else if (subset || highequal)
5013 in_p = 0, low = high = 0;
5014 else
5016 low = range_successor (high0);
5017 high = high1;
5018 in_p = 1;
5019 if (low == 0)
5021 /* high1 > high0 but high0 has no successor. Punt. */
5022 return 0;
5027 else
5029 /* The case where we are excluding both ranges. Here the complex case
5030 is if they don't overlap. In that case, the only time we have a
5031 range is if they are adjacent. If the second is a subset of the
5032 first, the result is the first. Otherwise, the range to exclude
5033 starts at the beginning of the first range and ends at the end of the
5034 second. */
5035 if (no_overlap)
5037 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5038 range_successor (high0),
5039 1, low1, 0)))
5040 in_p = 0, low = low0, high = high1;
5041 else
5043 /* Canonicalize - [min, x] into - [-, x]. */
5044 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5045 switch (TREE_CODE (TREE_TYPE (low0)))
5047 case ENUMERAL_TYPE:
5048 if (TYPE_PRECISION (TREE_TYPE (low0))
5049 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5050 break;
5051 /* FALLTHROUGH */
5052 case INTEGER_TYPE:
5053 if (tree_int_cst_equal (low0,
5054 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5055 low0 = 0;
5056 break;
5057 case POINTER_TYPE:
5058 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5059 && integer_zerop (low0))
5060 low0 = 0;
5061 break;
5062 default:
5063 break;
5066 /* Canonicalize - [x, max] into - [x, -]. */
5067 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5068 switch (TREE_CODE (TREE_TYPE (high1)))
5070 case ENUMERAL_TYPE:
5071 if (TYPE_PRECISION (TREE_TYPE (high1))
5072 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5073 break;
5074 /* FALLTHROUGH */
5075 case INTEGER_TYPE:
5076 if (tree_int_cst_equal (high1,
5077 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5078 high1 = 0;
5079 break;
5080 case POINTER_TYPE:
5081 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5082 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5083 high1, 1,
5084 integer_one_node, 1)))
5085 high1 = 0;
5086 break;
5087 default:
5088 break;
5091 /* The ranges might be also adjacent between the maximum and
5092 minimum values of the given type. For
5093 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5094 return + [x + 1, y - 1]. */
5095 if (low0 == 0 && high1 == 0)
5097 low = range_successor (high0);
5098 high = range_predecessor (low1);
5099 if (low == 0 || high == 0)
5100 return 0;
5102 in_p = 1;
5104 else
5105 return 0;
5108 else if (subset)
5109 in_p = 0, low = low0, high = high0;
5110 else
5111 in_p = 0, low = low0, high = high1;
5114 *pin_p = in_p, *plow = low, *phigh = high;
5115 return 1;
5119 /* Subroutine of fold, looking inside expressions of the form
5120 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5121 of the COND_EXPR. This function is being used also to optimize
5122 A op B ? C : A, by reversing the comparison first.
5124 Return a folded expression whose code is not a COND_EXPR
5125 anymore, or NULL_TREE if no folding opportunity is found. */
5127 static tree
5128 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5130 enum tree_code comp_code = TREE_CODE (arg0);
5131 tree arg00 = TREE_OPERAND (arg0, 0);
5132 tree arg01 = TREE_OPERAND (arg0, 1);
5133 tree arg1_type = TREE_TYPE (arg1);
5134 tree tem;
5136 STRIP_NOPS (arg1);
5137 STRIP_NOPS (arg2);
5139 /* If we have A op 0 ? A : -A, consider applying the following
5140 transformations:
5142 A == 0? A : -A same as -A
5143 A != 0? A : -A same as A
5144 A >= 0? A : -A same as abs (A)
5145 A > 0? A : -A same as abs (A)
5146 A <= 0? A : -A same as -abs (A)
5147 A < 0? A : -A same as -abs (A)
5149 None of these transformations work for modes with signed
5150 zeros. If A is +/-0, the first two transformations will
5151 change the sign of the result (from +0 to -0, or vice
5152 versa). The last four will fix the sign of the result,
5153 even though the original expressions could be positive or
5154 negative, depending on the sign of A.
5156 Note that all these transformations are correct if A is
5157 NaN, since the two alternatives (A and -A) are also NaNs. */
5158 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5159 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5160 ? real_zerop (arg01)
5161 : integer_zerop (arg01))
5162 && ((TREE_CODE (arg2) == NEGATE_EXPR
5163 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5164 /* In the case that A is of the form X-Y, '-A' (arg2) may
5165 have already been folded to Y-X, check for that. */
5166 || (TREE_CODE (arg1) == MINUS_EXPR
5167 && TREE_CODE (arg2) == MINUS_EXPR
5168 && operand_equal_p (TREE_OPERAND (arg1, 0),
5169 TREE_OPERAND (arg2, 1), 0)
5170 && operand_equal_p (TREE_OPERAND (arg1, 1),
5171 TREE_OPERAND (arg2, 0), 0))))
5172 switch (comp_code)
5174 case EQ_EXPR:
5175 case UNEQ_EXPR:
5176 tem = fold_convert (arg1_type, arg1);
5177 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5178 case NE_EXPR:
5179 case LTGT_EXPR:
5180 return pedantic_non_lvalue (fold_convert (type, arg1));
5181 case UNGE_EXPR:
5182 case UNGT_EXPR:
5183 if (flag_trapping_math)
5184 break;
5185 /* Fall through. */
5186 case GE_EXPR:
5187 case GT_EXPR:
5188 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5189 arg1 = fold_convert (signed_type_for
5190 (TREE_TYPE (arg1)), arg1);
5191 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5192 return pedantic_non_lvalue (fold_convert (type, tem));
5193 case UNLE_EXPR:
5194 case UNLT_EXPR:
5195 if (flag_trapping_math)
5196 break;
5197 case LE_EXPR:
5198 case LT_EXPR:
5199 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5200 arg1 = fold_convert (signed_type_for
5201 (TREE_TYPE (arg1)), arg1);
5202 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5203 return negate_expr (fold_convert (type, tem));
5204 default:
5205 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5206 break;
5209 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5210 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5211 both transformations are correct when A is NaN: A != 0
5212 is then true, and A == 0 is false. */
5214 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5215 && integer_zerop (arg01) && integer_zerop (arg2))
5217 if (comp_code == NE_EXPR)
5218 return pedantic_non_lvalue (fold_convert (type, arg1));
5219 else if (comp_code == EQ_EXPR)
5220 return build_int_cst (type, 0);
5223 /* Try some transformations of A op B ? A : B.
5225 A == B? A : B same as B
5226 A != B? A : B same as A
5227 A >= B? A : B same as max (A, B)
5228 A > B? A : B same as max (B, A)
5229 A <= B? A : B same as min (A, B)
5230 A < B? A : B same as min (B, A)
5232 As above, these transformations don't work in the presence
5233 of signed zeros. For example, if A and B are zeros of
5234 opposite sign, the first two transformations will change
5235 the sign of the result. In the last four, the original
5236 expressions give different results for (A=+0, B=-0) and
5237 (A=-0, B=+0), but the transformed expressions do not.
5239 The first two transformations are correct if either A or B
5240 is a NaN. In the first transformation, the condition will
5241 be false, and B will indeed be chosen. In the case of the
5242 second transformation, the condition A != B will be true,
5243 and A will be chosen.
5245 The conversions to max() and min() are not correct if B is
5246 a number and A is not. The conditions in the original
5247 expressions will be false, so all four give B. The min()
5248 and max() versions would give a NaN instead. */
5249 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5250 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5251 /* Avoid these transformations if the COND_EXPR may be used
5252 as an lvalue in the C++ front-end. PR c++/19199. */
5253 && (in_gimple_form
5254 || (strcmp (lang_hooks.name, "GNU C++") != 0
5255 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5256 || ! maybe_lvalue_p (arg1)
5257 || ! maybe_lvalue_p (arg2)))
5259 tree comp_op0 = arg00;
5260 tree comp_op1 = arg01;
5261 tree comp_type = TREE_TYPE (comp_op0);
5263 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5264 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5266 comp_type = type;
5267 comp_op0 = arg1;
5268 comp_op1 = arg2;
5271 switch (comp_code)
5273 case EQ_EXPR:
5274 return pedantic_non_lvalue (fold_convert (type, arg2));
5275 case NE_EXPR:
5276 return pedantic_non_lvalue (fold_convert (type, arg1));
5277 case LE_EXPR:
5278 case LT_EXPR:
5279 case UNLE_EXPR:
5280 case UNLT_EXPR:
5281 /* In C++ a ?: expression can be an lvalue, so put the
5282 operand which will be used if they are equal first
5283 so that we can convert this back to the
5284 corresponding COND_EXPR. */
5285 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5287 comp_op0 = fold_convert (comp_type, comp_op0);
5288 comp_op1 = fold_convert (comp_type, comp_op1);
5289 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5290 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5291 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5292 return pedantic_non_lvalue (fold_convert (type, tem));
5294 break;
5295 case GE_EXPR:
5296 case GT_EXPR:
5297 case UNGE_EXPR:
5298 case UNGT_EXPR:
5299 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5301 comp_op0 = fold_convert (comp_type, comp_op0);
5302 comp_op1 = fold_convert (comp_type, comp_op1);
5303 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5304 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5305 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5306 return pedantic_non_lvalue (fold_convert (type, tem));
5308 break;
5309 case UNEQ_EXPR:
5310 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5311 return pedantic_non_lvalue (fold_convert (type, arg2));
5312 break;
5313 case LTGT_EXPR:
5314 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5315 return pedantic_non_lvalue (fold_convert (type, arg1));
5316 break;
5317 default:
5318 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5319 break;
5323 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5324 we might still be able to simplify this. For example,
5325 if C1 is one less or one more than C2, this might have started
5326 out as a MIN or MAX and been transformed by this function.
5327 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5329 if (INTEGRAL_TYPE_P (type)
5330 && TREE_CODE (arg01) == INTEGER_CST
5331 && TREE_CODE (arg2) == INTEGER_CST)
5332 switch (comp_code)
5334 case EQ_EXPR:
5335 if (TREE_CODE (arg1) == INTEGER_CST)
5336 break;
5337 /* We can replace A with C1 in this case. */
5338 arg1 = fold_convert (type, arg01);
5339 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5341 case LT_EXPR:
5342 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5343 MIN_EXPR, to preserve the signedness of the comparison. */
5344 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5345 OEP_ONLY_CONST)
5346 && operand_equal_p (arg01,
5347 const_binop (PLUS_EXPR, arg2,
5348 build_int_cst (type, 1), 0),
5349 OEP_ONLY_CONST))
5351 tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00,
5352 fold_convert (TREE_TYPE (arg00), arg2));
5353 return pedantic_non_lvalue (fold_convert (type, tem));
5355 break;
5357 case LE_EXPR:
5358 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5359 as above. */
5360 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5361 OEP_ONLY_CONST)
5362 && operand_equal_p (arg01,
5363 const_binop (MINUS_EXPR, arg2,
5364 build_int_cst (type, 1), 0),
5365 OEP_ONLY_CONST))
5367 tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00,
5368 fold_convert (TREE_TYPE (arg00), arg2));
5369 return pedantic_non_lvalue (fold_convert (type, tem));
5371 break;
5373 case GT_EXPR:
5374 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5375 MAX_EXPR, to preserve the signedness of the comparison. */
5376 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5377 OEP_ONLY_CONST)
5378 && operand_equal_p (arg01,
5379 const_binop (MINUS_EXPR, arg2,
5380 build_int_cst (type, 1), 0),
5381 OEP_ONLY_CONST))
5383 tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00,
5384 fold_convert (TREE_TYPE (arg00), arg2));
5385 return pedantic_non_lvalue (fold_convert (type, tem));
5387 break;
5389 case GE_EXPR:
5390 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5391 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5392 OEP_ONLY_CONST)
5393 && operand_equal_p (arg01,
5394 const_binop (PLUS_EXPR, arg2,
5395 build_int_cst (type, 1), 0),
5396 OEP_ONLY_CONST))
5398 tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00,
5399 fold_convert (TREE_TYPE (arg00), arg2));
5400 return pedantic_non_lvalue (fold_convert (type, tem));
5402 break;
5403 case NE_EXPR:
5404 break;
5405 default:
5406 gcc_unreachable ();
5409 return NULL_TREE;
5414 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5415 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5416 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5417 false) >= 2)
5418 #endif
5420 /* EXP is some logical combination of boolean tests. See if we can
5421 merge it into some range test. Return the new tree if so. */
5423 static tree
5424 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5426 int or_op = (code == TRUTH_ORIF_EXPR
5427 || code == TRUTH_OR_EXPR);
5428 int in0_p, in1_p, in_p;
5429 tree low0, low1, low, high0, high1, high;
5430 bool strict_overflow_p = false;
5431 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5432 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5433 tree tem;
5434 const char * const warnmsg = G_("assuming signed overflow does not occur "
5435 "when simplifying range test");
5437 /* If this is an OR operation, invert both sides; we will invert
5438 again at the end. */
5439 if (or_op)
5440 in0_p = ! in0_p, in1_p = ! in1_p;
5442 /* If both expressions are the same, if we can merge the ranges, and we
5443 can build the range test, return it or it inverted. If one of the
5444 ranges is always true or always false, consider it to be the same
5445 expression as the other. */
5446 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5447 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5448 in1_p, low1, high1)
5449 && 0 != (tem = (build_range_check (type,
5450 lhs != 0 ? lhs
5451 : rhs != 0 ? rhs : integer_zero_node,
5452 in_p, low, high))))
5454 if (strict_overflow_p)
5455 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5456 return or_op ? invert_truthvalue (tem) : tem;
5459 /* On machines where the branch cost is expensive, if this is a
5460 short-circuited branch and the underlying object on both sides
5461 is the same, make a non-short-circuit operation. */
5462 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5463 && lhs != 0 && rhs != 0
5464 && (code == TRUTH_ANDIF_EXPR
5465 || code == TRUTH_ORIF_EXPR)
5466 && operand_equal_p (lhs, rhs, 0))
5468 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5469 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5470 which cases we can't do this. */
5471 if (simple_operand_p (lhs))
5472 return build2 (code == TRUTH_ANDIF_EXPR
5473 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5474 type, op0, op1);
5476 else if (lang_hooks.decls.global_bindings_p () == 0
5477 && ! CONTAINS_PLACEHOLDER_P (lhs))
5479 tree common = save_expr (lhs);
5481 if (0 != (lhs = build_range_check (type, common,
5482 or_op ? ! in0_p : in0_p,
5483 low0, high0))
5484 && (0 != (rhs = build_range_check (type, common,
5485 or_op ? ! in1_p : in1_p,
5486 low1, high1))))
5488 if (strict_overflow_p)
5489 fold_overflow_warning (warnmsg,
5490 WARN_STRICT_OVERFLOW_COMPARISON);
5491 return build2 (code == TRUTH_ANDIF_EXPR
5492 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5493 type, lhs, rhs);
5498 return 0;
5501 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5502 bit value. Arrange things so the extra bits will be set to zero if and
5503 only if C is signed-extended to its full width. If MASK is nonzero,
5504 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5506 static tree
5507 unextend (tree c, int p, int unsignedp, tree mask)
5509 tree type = TREE_TYPE (c);
5510 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5511 tree temp;
5513 if (p == modesize || unsignedp)
5514 return c;
5516 /* We work by getting just the sign bit into the low-order bit, then
5517 into the high-order bit, then sign-extend. We then XOR that value
5518 with C. */
5519 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5520 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5522 /* We must use a signed type in order to get an arithmetic right shift.
5523 However, we must also avoid introducing accidental overflows, so that
5524 a subsequent call to integer_zerop will work. Hence we must
5525 do the type conversion here. At this point, the constant is either
5526 zero or one, and the conversion to a signed type can never overflow.
5527 We could get an overflow if this conversion is done anywhere else. */
5528 if (TYPE_UNSIGNED (type))
5529 temp = fold_convert (signed_type_for (type), temp);
5531 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5532 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5533 if (mask != 0)
5534 temp = const_binop (BIT_AND_EXPR, temp,
5535 fold_convert (TREE_TYPE (c), mask), 0);
5536 /* If necessary, convert the type back to match the type of C. */
5537 if (TYPE_UNSIGNED (type))
5538 temp = fold_convert (type, temp);
5540 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5543 /* Find ways of folding logical expressions of LHS and RHS:
5544 Try to merge two comparisons to the same innermost item.
5545 Look for range tests like "ch >= '0' && ch <= '9'".
5546 Look for combinations of simple terms on machines with expensive branches
5547 and evaluate the RHS unconditionally.
5549 For example, if we have p->a == 2 && p->b == 4 and we can make an
5550 object large enough to span both A and B, we can do this with a comparison
5551 against the object ANDed with the a mask.
5553 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5554 operations to do this with one comparison.
5556 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5557 function and the one above.
5559 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5560 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5562 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5563 two operands.
5565 We return the simplified tree or 0 if no optimization is possible. */
5567 static tree
5568 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5570 /* If this is the "or" of two comparisons, we can do something if
5571 the comparisons are NE_EXPR. If this is the "and", we can do something
5572 if the comparisons are EQ_EXPR. I.e.,
5573 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5575 WANTED_CODE is this operation code. For single bit fields, we can
5576 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5577 comparison for one-bit fields. */
5579 enum tree_code wanted_code;
5580 enum tree_code lcode, rcode;
5581 tree ll_arg, lr_arg, rl_arg, rr_arg;
5582 tree ll_inner, lr_inner, rl_inner, rr_inner;
5583 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5584 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5585 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5586 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5587 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5588 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5589 enum machine_mode lnmode, rnmode;
5590 tree ll_mask, lr_mask, rl_mask, rr_mask;
5591 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5592 tree l_const, r_const;
5593 tree lntype, rntype, result;
5594 HOST_WIDE_INT first_bit, end_bit;
5595 int volatilep;
5596 tree orig_lhs = lhs, orig_rhs = rhs;
5597 enum tree_code orig_code = code;
5599 /* Start by getting the comparison codes. Fail if anything is volatile.
5600 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5601 it were surrounded with a NE_EXPR. */
5603 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5604 return 0;
5606 lcode = TREE_CODE (lhs);
5607 rcode = TREE_CODE (rhs);
5609 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5611 lhs = build2 (NE_EXPR, truth_type, lhs,
5612 build_int_cst (TREE_TYPE (lhs), 0));
5613 lcode = NE_EXPR;
5616 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5618 rhs = build2 (NE_EXPR, truth_type, rhs,
5619 build_int_cst (TREE_TYPE (rhs), 0));
5620 rcode = NE_EXPR;
5623 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5624 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5625 return 0;
5627 ll_arg = TREE_OPERAND (lhs, 0);
5628 lr_arg = TREE_OPERAND (lhs, 1);
5629 rl_arg = TREE_OPERAND (rhs, 0);
5630 rr_arg = TREE_OPERAND (rhs, 1);
5632 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5633 if (simple_operand_p (ll_arg)
5634 && simple_operand_p (lr_arg))
5636 tree result;
5637 if (operand_equal_p (ll_arg, rl_arg, 0)
5638 && operand_equal_p (lr_arg, rr_arg, 0))
5640 result = combine_comparisons (code, lcode, rcode,
5641 truth_type, ll_arg, lr_arg);
5642 if (result)
5643 return result;
5645 else if (operand_equal_p (ll_arg, rr_arg, 0)
5646 && operand_equal_p (lr_arg, rl_arg, 0))
5648 result = combine_comparisons (code, lcode,
5649 swap_tree_comparison (rcode),
5650 truth_type, ll_arg, lr_arg);
5651 if (result)
5652 return result;
5656 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5657 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5659 /* If the RHS can be evaluated unconditionally and its operands are
5660 simple, it wins to evaluate the RHS unconditionally on machines
5661 with expensive branches. In this case, this isn't a comparison
5662 that can be merged. Avoid doing this if the RHS is a floating-point
5663 comparison since those can trap. */
5665 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5666 false) >= 2
5667 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5668 && simple_operand_p (rl_arg)
5669 && simple_operand_p (rr_arg))
5671 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5672 if (code == TRUTH_OR_EXPR
5673 && lcode == NE_EXPR && integer_zerop (lr_arg)
5674 && rcode == NE_EXPR && integer_zerop (rr_arg)
5675 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5676 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5677 return build2 (NE_EXPR, truth_type,
5678 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5679 ll_arg, rl_arg),
5680 build_int_cst (TREE_TYPE (ll_arg), 0));
5682 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5683 if (code == TRUTH_AND_EXPR
5684 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5685 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5686 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5687 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5688 return build2 (EQ_EXPR, truth_type,
5689 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5690 ll_arg, rl_arg),
5691 build_int_cst (TREE_TYPE (ll_arg), 0));
5693 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5695 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5696 return build2 (code, truth_type, lhs, rhs);
5697 return NULL_TREE;
5701 /* See if the comparisons can be merged. Then get all the parameters for
5702 each side. */
5704 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5705 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5706 return 0;
5708 volatilep = 0;
5709 ll_inner = decode_field_reference (ll_arg,
5710 &ll_bitsize, &ll_bitpos, &ll_mode,
5711 &ll_unsignedp, &volatilep, &ll_mask,
5712 &ll_and_mask);
5713 lr_inner = decode_field_reference (lr_arg,
5714 &lr_bitsize, &lr_bitpos, &lr_mode,
5715 &lr_unsignedp, &volatilep, &lr_mask,
5716 &lr_and_mask);
5717 rl_inner = decode_field_reference (rl_arg,
5718 &rl_bitsize, &rl_bitpos, &rl_mode,
5719 &rl_unsignedp, &volatilep, &rl_mask,
5720 &rl_and_mask);
5721 rr_inner = decode_field_reference (rr_arg,
5722 &rr_bitsize, &rr_bitpos, &rr_mode,
5723 &rr_unsignedp, &volatilep, &rr_mask,
5724 &rr_and_mask);
5726 /* It must be true that the inner operation on the lhs of each
5727 comparison must be the same if we are to be able to do anything.
5728 Then see if we have constants. If not, the same must be true for
5729 the rhs's. */
5730 if (volatilep || ll_inner == 0 || rl_inner == 0
5731 || ! operand_equal_p (ll_inner, rl_inner, 0))
5732 return 0;
5734 if (TREE_CODE (lr_arg) == INTEGER_CST
5735 && TREE_CODE (rr_arg) == INTEGER_CST)
5736 l_const = lr_arg, r_const = rr_arg;
5737 else if (lr_inner == 0 || rr_inner == 0
5738 || ! operand_equal_p (lr_inner, rr_inner, 0))
5739 return 0;
5740 else
5741 l_const = r_const = 0;
5743 /* If either comparison code is not correct for our logical operation,
5744 fail. However, we can convert a one-bit comparison against zero into
5745 the opposite comparison against that bit being set in the field. */
5747 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5748 if (lcode != wanted_code)
5750 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5752 /* Make the left operand unsigned, since we are only interested
5753 in the value of one bit. Otherwise we are doing the wrong
5754 thing below. */
5755 ll_unsignedp = 1;
5756 l_const = ll_mask;
5758 else
5759 return 0;
5762 /* This is analogous to the code for l_const above. */
5763 if (rcode != wanted_code)
5765 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5767 rl_unsignedp = 1;
5768 r_const = rl_mask;
5770 else
5771 return 0;
5774 /* See if we can find a mode that contains both fields being compared on
5775 the left. If we can't, fail. Otherwise, update all constants and masks
5776 to be relative to a field of that size. */
5777 first_bit = MIN (ll_bitpos, rl_bitpos);
5778 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5779 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5780 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5781 volatilep);
5782 if (lnmode == VOIDmode)
5783 return 0;
5785 lnbitsize = GET_MODE_BITSIZE (lnmode);
5786 lnbitpos = first_bit & ~ (lnbitsize - 1);
5787 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5788 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5790 if (BYTES_BIG_ENDIAN)
5792 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5793 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5796 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5797 size_int (xll_bitpos), 0);
5798 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5799 size_int (xrl_bitpos), 0);
5801 if (l_const)
5803 l_const = fold_convert (lntype, l_const);
5804 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5805 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5806 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5807 fold_build1 (BIT_NOT_EXPR,
5808 lntype, ll_mask),
5809 0)))
5811 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5813 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5816 if (r_const)
5818 r_const = fold_convert (lntype, r_const);
5819 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5820 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5821 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5822 fold_build1 (BIT_NOT_EXPR,
5823 lntype, rl_mask),
5824 0)))
5826 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5828 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5832 /* If the right sides are not constant, do the same for it. Also,
5833 disallow this optimization if a size or signedness mismatch occurs
5834 between the left and right sides. */
5835 if (l_const == 0)
5837 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5838 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5839 /* Make sure the two fields on the right
5840 correspond to the left without being swapped. */
5841 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5842 return 0;
5844 first_bit = MIN (lr_bitpos, rr_bitpos);
5845 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5846 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5847 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5848 volatilep);
5849 if (rnmode == VOIDmode)
5850 return 0;
5852 rnbitsize = GET_MODE_BITSIZE (rnmode);
5853 rnbitpos = first_bit & ~ (rnbitsize - 1);
5854 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5855 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5857 if (BYTES_BIG_ENDIAN)
5859 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5860 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5863 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5864 size_int (xlr_bitpos), 0);
5865 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5866 size_int (xrr_bitpos), 0);
5868 /* Make a mask that corresponds to both fields being compared.
5869 Do this for both items being compared. If the operands are the
5870 same size and the bits being compared are in the same position
5871 then we can do this by masking both and comparing the masked
5872 results. */
5873 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5874 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5875 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5877 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5878 ll_unsignedp || rl_unsignedp);
5879 if (! all_ones_mask_p (ll_mask, lnbitsize))
5880 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5882 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5883 lr_unsignedp || rr_unsignedp);
5884 if (! all_ones_mask_p (lr_mask, rnbitsize))
5885 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5887 return build2 (wanted_code, truth_type, lhs, rhs);
5890 /* There is still another way we can do something: If both pairs of
5891 fields being compared are adjacent, we may be able to make a wider
5892 field containing them both.
5894 Note that we still must mask the lhs/rhs expressions. Furthermore,
5895 the mask must be shifted to account for the shift done by
5896 make_bit_field_ref. */
5897 if ((ll_bitsize + ll_bitpos == rl_bitpos
5898 && lr_bitsize + lr_bitpos == rr_bitpos)
5899 || (ll_bitpos == rl_bitpos + rl_bitsize
5900 && lr_bitpos == rr_bitpos + rr_bitsize))
5902 tree type;
5904 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5905 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5906 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5907 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5909 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5910 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5911 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5912 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5914 /* Convert to the smaller type before masking out unwanted bits. */
5915 type = lntype;
5916 if (lntype != rntype)
5918 if (lnbitsize > rnbitsize)
5920 lhs = fold_convert (rntype, lhs);
5921 ll_mask = fold_convert (rntype, ll_mask);
5922 type = rntype;
5924 else if (lnbitsize < rnbitsize)
5926 rhs = fold_convert (lntype, rhs);
5927 lr_mask = fold_convert (lntype, lr_mask);
5928 type = lntype;
5932 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5933 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5935 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5936 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5938 return build2 (wanted_code, truth_type, lhs, rhs);
5941 return 0;
5944 /* Handle the case of comparisons with constants. If there is something in
5945 common between the masks, those bits of the constants must be the same.
5946 If not, the condition is always false. Test for this to avoid generating
5947 incorrect code below. */
5948 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5949 if (! integer_zerop (result)
5950 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5951 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5953 if (wanted_code == NE_EXPR)
5955 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5956 return constant_boolean_node (true, truth_type);
5958 else
5960 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5961 return constant_boolean_node (false, truth_type);
5965 /* Construct the expression we will return. First get the component
5966 reference we will make. Unless the mask is all ones the width of
5967 that field, perform the mask operation. Then compare with the
5968 merged constant. */
5969 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5970 ll_unsignedp || rl_unsignedp);
5972 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5973 if (! all_ones_mask_p (ll_mask, lnbitsize))
5974 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5976 return build2 (wanted_code, truth_type, result,
5977 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5980 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5981 constant. */
5983 static tree
5984 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5986 tree arg0 = op0;
5987 enum tree_code op_code;
5988 tree comp_const;
5989 tree minmax_const;
5990 int consts_equal, consts_lt;
5991 tree inner;
5993 STRIP_SIGN_NOPS (arg0);
5995 op_code = TREE_CODE (arg0);
5996 minmax_const = TREE_OPERAND (arg0, 1);
5997 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5998 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5999 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6000 inner = TREE_OPERAND (arg0, 0);
6002 /* If something does not permit us to optimize, return the original tree. */
6003 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6004 || TREE_CODE (comp_const) != INTEGER_CST
6005 || TREE_OVERFLOW (comp_const)
6006 || TREE_CODE (minmax_const) != INTEGER_CST
6007 || TREE_OVERFLOW (minmax_const))
6008 return NULL_TREE;
6010 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6011 and GT_EXPR, doing the rest with recursive calls using logical
6012 simplifications. */
6013 switch (code)
6015 case NE_EXPR: case LT_EXPR: case LE_EXPR:
6017 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
6018 type, op0, op1);
6019 if (tem)
6020 return invert_truthvalue (tem);
6021 return NULL_TREE;
6024 case GE_EXPR:
6025 return
6026 fold_build2 (TRUTH_ORIF_EXPR, type,
6027 optimize_minmax_comparison
6028 (EQ_EXPR, type, arg0, comp_const),
6029 optimize_minmax_comparison
6030 (GT_EXPR, type, arg0, comp_const));
6032 case EQ_EXPR:
6033 if (op_code == MAX_EXPR && consts_equal)
6034 /* MAX (X, 0) == 0 -> X <= 0 */
6035 return fold_build2 (LE_EXPR, type, inner, comp_const);
6037 else if (op_code == MAX_EXPR && consts_lt)
6038 /* MAX (X, 0) == 5 -> X == 5 */
6039 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6041 else if (op_code == MAX_EXPR)
6042 /* MAX (X, 0) == -1 -> false */
6043 return omit_one_operand (type, integer_zero_node, inner);
6045 else if (consts_equal)
6046 /* MIN (X, 0) == 0 -> X >= 0 */
6047 return fold_build2 (GE_EXPR, type, inner, comp_const);
6049 else if (consts_lt)
6050 /* MIN (X, 0) == 5 -> false */
6051 return omit_one_operand (type, integer_zero_node, inner);
6053 else
6054 /* MIN (X, 0) == -1 -> X == -1 */
6055 return fold_build2 (EQ_EXPR, type, inner, comp_const);
6057 case GT_EXPR:
6058 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6059 /* MAX (X, 0) > 0 -> X > 0
6060 MAX (X, 0) > 5 -> X > 5 */
6061 return fold_build2 (GT_EXPR, type, inner, comp_const);
6063 else if (op_code == MAX_EXPR)
6064 /* MAX (X, 0) > -1 -> true */
6065 return omit_one_operand (type, integer_one_node, inner);
6067 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6068 /* MIN (X, 0) > 0 -> false
6069 MIN (X, 0) > 5 -> false */
6070 return omit_one_operand (type, integer_zero_node, inner);
6072 else
6073 /* MIN (X, 0) > -1 -> X > -1 */
6074 return fold_build2 (GT_EXPR, type, inner, comp_const);
6076 default:
6077 return NULL_TREE;
6081 /* T is an integer expression that is being multiplied, divided, or taken a
6082 modulus (CODE says which and what kind of divide or modulus) by a
6083 constant C. See if we can eliminate that operation by folding it with
6084 other operations already in T. WIDE_TYPE, if non-null, is a type that
6085 should be used for the computation if wider than our type.
6087 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6088 (X * 2) + (Y * 4). We must, however, be assured that either the original
6089 expression would not overflow or that overflow is undefined for the type
6090 in the language in question.
6092 If we return a non-null expression, it is an equivalent form of the
6093 original computation, but need not be in the original type.
6095 We set *STRICT_OVERFLOW_P to true if the return values depends on
6096 signed overflow being undefined. Otherwise we do not change
6097 *STRICT_OVERFLOW_P. */
6099 static tree
6100 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6101 bool *strict_overflow_p)
6103 /* To avoid exponential search depth, refuse to allow recursion past
6104 three levels. Beyond that (1) it's highly unlikely that we'll find
6105 something interesting and (2) we've probably processed it before
6106 when we built the inner expression. */
6108 static int depth;
6109 tree ret;
6111 if (depth > 3)
6112 return NULL;
6114 depth++;
6115 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6116 depth--;
6118 return ret;
6121 static tree
6122 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6123 bool *strict_overflow_p)
6125 tree type = TREE_TYPE (t);
6126 enum tree_code tcode = TREE_CODE (t);
6127 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6128 > GET_MODE_SIZE (TYPE_MODE (type)))
6129 ? wide_type : type);
6130 tree t1, t2;
6131 int same_p = tcode == code;
6132 tree op0 = NULL_TREE, op1 = NULL_TREE;
6133 bool sub_strict_overflow_p;
6135 /* Don't deal with constants of zero here; they confuse the code below. */
6136 if (integer_zerop (c))
6137 return NULL_TREE;
6139 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6140 op0 = TREE_OPERAND (t, 0);
6142 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6143 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6145 /* Note that we need not handle conditional operations here since fold
6146 already handles those cases. So just do arithmetic here. */
6147 switch (tcode)
6149 case INTEGER_CST:
6150 /* For a constant, we can always simplify if we are a multiply
6151 or (for divide and modulus) if it is a multiple of our constant. */
6152 if (MULT_EXPR_CODE_P (code)
6153 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6154 return const_binop (code, fold_convert (ctype, t),
6155 fold_convert (ctype, c), 0);
6156 break;
6158 CASE_CONVERT: case NON_LVALUE_EXPR:
6159 /* If op0 is an expression ... */
6160 if ((COMPARISON_CLASS_P (op0)
6161 || UNARY_CLASS_P (op0)
6162 || BINARY_CLASS_P (op0)
6163 || VL_EXP_CLASS_P (op0)
6164 || EXPRESSION_CLASS_P (op0))
6165 /* ... and possibly overflows, and its type is smaller
6166 than ctype, then we cannot pass through as widening. */
6167 && ((strip_nv (TREE_CODE (op0)) == TREE_CODE (op0)
6168 /* ??? Remove me. */
6169 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6170 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6171 && (TYPE_PRECISION (ctype)
6172 > TYPE_PRECISION (TREE_TYPE (op0))))
6173 /* ... or this is a truncation (t is narrower than op0),
6174 then we cannot pass through this narrowing. */
6175 || (TYPE_PRECISION (type)
6176 < TYPE_PRECISION (TREE_TYPE (op0)))
6177 /* ... or signedness changes for division or modulus,
6178 then we cannot pass through this conversion. */
6179 || (!MULT_EXPR_CODE_P (code)
6180 && (TYPE_UNSIGNED (ctype)
6181 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
6182 break;
6184 /* Pass the constant down and see if we can make a simplification. If
6185 we can, replace this expression with the inner simplification for
6186 possible later conversion to our or some other type. */
6187 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6188 && TREE_CODE (t2) == INTEGER_CST
6189 && !TREE_OVERFLOW (t2)
6190 && (0 != (t1 = extract_muldiv (op0, t2, code,
6191 MULT_EXPR_CODE_P (code)
6192 ? ctype : NULL_TREE,
6193 strict_overflow_p))))
6194 return t1;
6195 break;
6197 case ABS_EXPR:
6198 /* If widening the type changes it from signed to unsigned, then we
6199 must avoid building ABS_EXPR itself as unsigned. */
6200 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6202 tree cstype = (*signed_type_for) (ctype);
6203 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6204 != 0)
6206 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6207 return fold_convert (ctype, t1);
6209 break;
6211 /* If the constant is negative, we cannot simplify this. */
6212 if (tree_int_cst_sgn (c) == -1)
6213 break;
6214 /* FALLTHROUGH */
6215 case NEGATE_EXPR:
6216 case NEGATENV_EXPR:
6217 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6218 != 0)
6219 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6220 break;
6222 case MIN_EXPR: case MAX_EXPR:
6223 /* If widening the type changes the signedness, then we can't perform
6224 this optimization as that changes the result. */
6225 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6226 break;
6228 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6229 sub_strict_overflow_p = false;
6230 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6231 &sub_strict_overflow_p)) != 0
6232 && (t2 = extract_muldiv (op1, c, code, wide_type,
6233 &sub_strict_overflow_p)) != 0)
6235 if (tree_int_cst_sgn (c) < 0)
6236 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6237 if (sub_strict_overflow_p)
6238 *strict_overflow_p = true;
6239 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6240 fold_convert (ctype, t2));
6242 break;
6244 case LSHIFT_EXPR: case RSHIFT_EXPR:
6245 /* If the second operand is constant, this is a multiplication
6246 or floor division, by a power of two, so we can treat it that
6247 way unless the multiplier or divisor overflows. Signed
6248 left-shift overflow is implementation-defined rather than
6249 undefined in C90, so do not convert signed left shift into
6250 multiplication. */
6251 if (TREE_CODE (op1) == INTEGER_CST
6252 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6253 /* const_binop may not detect overflow correctly,
6254 so check for it explicitly here. */
6255 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6256 && TREE_INT_CST_HIGH (op1) == 0
6257 && 0 != (t1 = fold_convert (ctype,
6258 const_binop (LSHIFT_EXPR,
6259 size_one_node,
6260 op1, 0)))
6261 && !TREE_OVERFLOW (t1))
6262 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6263 ? MULT_EXPR : FLOOR_DIV_EXPR,
6264 ctype, fold_convert (ctype, op0), t1),
6265 c, code, wide_type, strict_overflow_p);
6266 break;
6268 case PLUS_EXPR:
6269 case PLUSNV_EXPR:
6270 case MINUS_EXPR:
6271 case MINUSNV_EXPR:
6272 /* See if we can eliminate the operation on both sides. If we can, we
6273 can return a new PLUS or MINUS. If we can't, the only remaining
6274 cases where we can do anything are if the second operand is a
6275 constant. */
6276 sub_strict_overflow_p = false;
6277 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6278 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6279 if (t1 != 0 && t2 != 0
6280 && (MULT_EXPR_CODE_P (code)
6281 /* If not multiplication, we can only do this if both operands
6282 are divisible by c. */
6283 || (multiple_of_p (ctype, op0, c)
6284 && multiple_of_p (ctype, op1, c))))
6286 if (sub_strict_overflow_p)
6287 *strict_overflow_p = true;
6288 return fold_build2 (strip_nv (tcode), ctype, fold_convert (ctype, t1),
6289 fold_convert (ctype, t2));
6292 /* If this was a subtraction, negate OP1 and set it to be an addition.
6293 This simplifies the logic below. */
6294 if (MINUS_EXPR_CODE_P (tcode))
6295 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6297 if (TREE_CODE (op1) != INTEGER_CST)
6298 break;
6300 /* If either OP1 or C are negative, this optimization is not safe for
6301 some of the division and remainder types while for others we need
6302 to change the code. */
6303 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6305 if (code == CEIL_DIV_EXPR)
6306 code = FLOOR_DIV_EXPR;
6307 else if (code == FLOOR_DIV_EXPR)
6308 code = CEIL_DIV_EXPR;
6309 else if (!MULT_EXPR_CODE_P (code)
6310 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6311 break;
6314 /* If it's a multiply or a division/modulus operation of a multiple
6315 of our constant, do the operation. */
6316 if (MULT_EXPR_CODE_P (code)
6317 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6319 op1 = const_binop (code, fold_convert (ctype, op1),
6320 fold_convert (ctype, c), 0);
6321 if (op1 == 0)
6322 break;
6324 else
6325 break;
6327 /* If the original operation possibly overflowed we cannot widen
6328 the operation since it will change the result. */
6329 if (tcode == strip_nv (tcode)
6330 && ctype != type)
6331 break;
6333 /* If we were able to eliminate our operation from the first side,
6334 apply our operation to the second side and reform the PLUS. */
6335 if (t1 != 0 && (strip_nv (TREE_CODE (t1)) != strip_nv (code)
6336 || strip_nv (code) == MULT_EXPR))
6337 return fold_build2 (strip_nv (tcode),
6338 ctype, fold_convert (ctype, t1), op1);
6340 /* The last case is if we are a multiply. In that case, we can
6341 apply the distributive law to commute the multiply and addition
6342 if the multiplication of the constants doesn't overflow. */
6343 if (MULT_EXPR_CODE_P (code))
6344 return fold_build2 (strip_nv (tcode), ctype,
6345 fold_build2 (MULT_EXPR, ctype,
6346 fold_convert (ctype, op0),
6347 fold_convert (ctype, c)),
6348 op1);
6350 break;
6352 case MULT_EXPR:
6353 case MULTNV_EXPR:
6354 /* We have a special case here if we are doing something like
6355 (C * 8) % 4 since we know that's zero. */
6356 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6357 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6358 /* If the multiplication can overflow we cannot optimize this. */
6359 && (tcode == MULTNV_EXPR
6360 /* ??? Until we can properly mark individual operations as
6361 not overflowing we need to treat sizetype special here as
6362 stor-layout relies on this opimization to make
6363 DECL_FIELD_BIT_OFFSET always a constant. */
6364 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6365 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6366 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6367 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6369 *strict_overflow_p = true;
6370 return omit_one_operand (type, integer_zero_node, op0);
6373 /* ... fall through ... */
6375 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6376 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6377 /* If we can extract our operation from the LHS, do so and return a
6378 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6379 do something only if the second operand is a constant. */
6380 if (strip_nv (tcode) == strip_nv (code)
6381 && (t1 = extract_muldiv (op0, c, code, wide_type,
6382 strict_overflow_p)) != 0)
6383 return fold_build2 (same_p ? tcode : strip_nv (tcode),
6384 ctype, fold_convert (ctype, t1),
6385 fold_convert (ctype, op1));
6386 else if (MULT_EXPR_CODE_P (tcode) && MULT_EXPR_CODE_P (code)
6387 && (t1 = extract_muldiv (op1, c, code, wide_type,
6388 strict_overflow_p)) != 0)
6389 return fold_build2 (same_p ? tcode : strip_nv (tcode),
6390 ctype, fold_convert (ctype, op0),
6391 fold_convert (ctype, t1));
6392 else if (TREE_CODE (op1) != INTEGER_CST)
6393 return 0;
6395 /* If these are the same operation types, we can associate them
6396 assuming no overflow.
6397 ??? Why only for no overflow? */
6398 if (strip_nv (tcode) == strip_nv (code)
6399 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6400 fold_convert (ctype, c), 1))
6401 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6402 TREE_INT_CST_HIGH (t1),
6403 (TYPE_UNSIGNED (ctype)
6404 && tcode != MULT_EXPR) ? -1 : 1,
6405 TREE_OVERFLOW (t1)))
6406 && !TREE_OVERFLOW (t1))
6407 return fold_build2 (same_p ? tcode : strip_nv (tcode), ctype,
6408 fold_convert (ctype, op0), t1);
6410 /* If these operations "cancel" each other, we have the main
6411 optimizations of this pass, which occur when either constant is a
6412 multiple of the other, in which case we replace this with either an
6413 operation or CODE or TCODE.
6415 If we have an unsigned type that is not a sizetype, we cannot do
6416 this since it will change the result if the original computation
6417 overflowed. */
6418 if ((MULT_EXPR_CODE_P (code) && tcode == EXACT_DIV_EXPR)
6419 || (tcode == MULTNV_EXPR
6420 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6421 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6422 && !MULT_EXPR_CODE_P (code)))
6424 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6426 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6427 *strict_overflow_p = true;
6428 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6429 fold_convert (ctype,
6430 const_binop (TRUNC_DIV_EXPR,
6431 op1, c, 0)));
6433 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6435 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6436 *strict_overflow_p = true;
6437 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6438 fold_convert (ctype,
6439 const_binop (TRUNC_DIV_EXPR,
6440 c, op1, 0)));
6443 break;
6445 default:
6446 break;
6449 return 0;
6452 /* Return a node which has the indicated constant VALUE (either 0 or
6453 1), and is of the indicated TYPE. */
6455 tree
6456 constant_boolean_node (int value, tree type)
6458 if (type == integer_type_node)
6459 return value ? integer_one_node : integer_zero_node;
6460 else if (type == boolean_type_node)
6461 return value ? boolean_true_node : boolean_false_node;
6462 else
6463 return build_int_cst (type, value);
6467 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6468 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6469 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6470 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6471 COND is the first argument to CODE; otherwise (as in the example
6472 given here), it is the second argument. TYPE is the type of the
6473 original expression. Return NULL_TREE if no simplification is
6474 possible. */
6476 static tree
6477 fold_binary_op_with_conditional_arg (enum tree_code code,
6478 tree type, tree op0, tree op1,
6479 tree cond, tree arg, int cond_first_p)
6481 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6482 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6483 tree test, true_value, false_value;
6484 tree lhs = NULL_TREE;
6485 tree rhs = NULL_TREE;
6487 /* This transformation is only worthwhile if we don't have to wrap
6488 arg in a SAVE_EXPR, and the operation can be simplified on at least
6489 one of the branches once its pushed inside the COND_EXPR. */
6490 if (!TREE_CONSTANT (arg))
6491 return NULL_TREE;
6493 if (TREE_CODE (cond) == COND_EXPR)
6495 test = TREE_OPERAND (cond, 0);
6496 true_value = TREE_OPERAND (cond, 1);
6497 false_value = TREE_OPERAND (cond, 2);
6498 /* If this operand throws an expression, then it does not make
6499 sense to try to perform a logical or arithmetic operation
6500 involving it. */
6501 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6502 lhs = true_value;
6503 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6504 rhs = false_value;
6506 else
6508 tree testtype = TREE_TYPE (cond);
6509 test = cond;
6510 true_value = constant_boolean_node (true, testtype);
6511 false_value = constant_boolean_node (false, testtype);
6514 arg = fold_convert (arg_type, arg);
6515 if (lhs == 0)
6517 true_value = fold_convert (cond_type, true_value);
6518 if (cond_first_p)
6519 lhs = fold_build2 (code, type, true_value, arg);
6520 else
6521 lhs = fold_build2 (code, type, arg, true_value);
6523 if (rhs == 0)
6525 false_value = fold_convert (cond_type, false_value);
6526 if (cond_first_p)
6527 rhs = fold_build2 (code, type, false_value, arg);
6528 else
6529 rhs = fold_build2 (code, type, arg, false_value);
6532 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6533 return fold_convert (type, test);
6537 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6539 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6540 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6541 ADDEND is the same as X.
6543 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6544 and finite. The problematic cases are when X is zero, and its mode
6545 has signed zeros. In the case of rounding towards -infinity,
6546 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6547 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6549 bool
6550 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6552 if (!real_zerop (addend))
6553 return false;
6555 /* Don't allow the fold with -fsignaling-nans. */
6556 if (HONOR_SNANS (TYPE_MODE (type)))
6557 return false;
6559 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6560 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6561 return true;
6563 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6564 if (TREE_CODE (addend) == REAL_CST
6565 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6566 negate = !negate;
6568 /* The mode has signed zeros, and we have to honor their sign.
6569 In this situation, there is only one case we can return true for.
6570 X - 0 is the same as X unless rounding towards -infinity is
6571 supported. */
6572 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6575 /* Subroutine of fold() that checks comparisons of built-in math
6576 functions against real constants.
6578 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6579 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6580 is the type of the result and ARG0 and ARG1 are the operands of the
6581 comparison. ARG1 must be a TREE_REAL_CST.
6583 The function returns the constant folded tree if a simplification
6584 can be made, and NULL_TREE otherwise. */
6586 static tree
6587 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6588 tree type, tree arg0, tree arg1)
6590 REAL_VALUE_TYPE c;
6592 if (BUILTIN_SQRT_P (fcode))
6594 tree arg = CALL_EXPR_ARG (arg0, 0);
6595 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6597 c = TREE_REAL_CST (arg1);
6598 if (REAL_VALUE_NEGATIVE (c))
6600 /* sqrt(x) < y is always false, if y is negative. */
6601 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6602 return omit_one_operand (type, integer_zero_node, arg);
6604 /* sqrt(x) > y is always true, if y is negative and we
6605 don't care about NaNs, i.e. negative values of x. */
6606 if (code == NE_EXPR || !HONOR_NANS (mode))
6607 return omit_one_operand (type, integer_one_node, arg);
6609 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6610 return fold_build2 (GE_EXPR, type, arg,
6611 build_real (TREE_TYPE (arg), dconst0));
6613 else if (code == GT_EXPR || code == GE_EXPR)
6615 REAL_VALUE_TYPE c2;
6617 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6618 real_convert (&c2, mode, &c2);
6620 if (REAL_VALUE_ISINF (c2))
6622 /* sqrt(x) > y is x == +Inf, when y is very large. */
6623 if (HONOR_INFINITIES (mode))
6624 return fold_build2 (EQ_EXPR, type, arg,
6625 build_real (TREE_TYPE (arg), c2));
6627 /* sqrt(x) > y is always false, when y is very large
6628 and we don't care about infinities. */
6629 return omit_one_operand (type, integer_zero_node, arg);
6632 /* sqrt(x) > c is the same as x > c*c. */
6633 return fold_build2 (code, type, arg,
6634 build_real (TREE_TYPE (arg), c2));
6636 else if (code == LT_EXPR || code == LE_EXPR)
6638 REAL_VALUE_TYPE c2;
6640 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6641 real_convert (&c2, mode, &c2);
6643 if (REAL_VALUE_ISINF (c2))
6645 /* sqrt(x) < y is always true, when y is a very large
6646 value and we don't care about NaNs or Infinities. */
6647 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6648 return omit_one_operand (type, integer_one_node, arg);
6650 /* sqrt(x) < y is x != +Inf when y is very large and we
6651 don't care about NaNs. */
6652 if (! HONOR_NANS (mode))
6653 return fold_build2 (NE_EXPR, type, arg,
6654 build_real (TREE_TYPE (arg), c2));
6656 /* sqrt(x) < y is x >= 0 when y is very large and we
6657 don't care about Infinities. */
6658 if (! HONOR_INFINITIES (mode))
6659 return fold_build2 (GE_EXPR, type, arg,
6660 build_real (TREE_TYPE (arg), dconst0));
6662 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6663 if (lang_hooks.decls.global_bindings_p () != 0
6664 || CONTAINS_PLACEHOLDER_P (arg))
6665 return NULL_TREE;
6667 arg = save_expr (arg);
6668 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6669 fold_build2 (GE_EXPR, type, arg,
6670 build_real (TREE_TYPE (arg),
6671 dconst0)),
6672 fold_build2 (NE_EXPR, type, arg,
6673 build_real (TREE_TYPE (arg),
6674 c2)));
6677 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6678 if (! HONOR_NANS (mode))
6679 return fold_build2 (code, type, arg,
6680 build_real (TREE_TYPE (arg), c2));
6682 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6683 if (lang_hooks.decls.global_bindings_p () == 0
6684 && ! CONTAINS_PLACEHOLDER_P (arg))
6686 arg = save_expr (arg);
6687 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6688 fold_build2 (GE_EXPR, type, arg,
6689 build_real (TREE_TYPE (arg),
6690 dconst0)),
6691 fold_build2 (code, type, arg,
6692 build_real (TREE_TYPE (arg),
6693 c2)));
6698 return NULL_TREE;
6701 /* Subroutine of fold() that optimizes comparisons against Infinities,
6702 either +Inf or -Inf.
6704 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6705 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6706 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6708 The function returns the constant folded tree if a simplification
6709 can be made, and NULL_TREE otherwise. */
6711 static tree
6712 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6714 enum machine_mode mode;
6715 REAL_VALUE_TYPE max;
6716 tree temp;
6717 bool neg;
6719 mode = TYPE_MODE (TREE_TYPE (arg0));
6721 /* For negative infinity swap the sense of the comparison. */
6722 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6723 if (neg)
6724 code = swap_tree_comparison (code);
6726 switch (code)
6728 case GT_EXPR:
6729 /* x > +Inf is always false, if with ignore sNANs. */
6730 if (HONOR_SNANS (mode))
6731 return NULL_TREE;
6732 return omit_one_operand (type, integer_zero_node, arg0);
6734 case LE_EXPR:
6735 /* x <= +Inf is always true, if we don't case about NaNs. */
6736 if (! HONOR_NANS (mode))
6737 return omit_one_operand (type, integer_one_node, arg0);
6739 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6740 if (lang_hooks.decls.global_bindings_p () == 0
6741 && ! CONTAINS_PLACEHOLDER_P (arg0))
6743 arg0 = save_expr (arg0);
6744 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6746 break;
6748 case EQ_EXPR:
6749 case GE_EXPR:
6750 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6751 real_maxval (&max, neg, mode);
6752 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6753 arg0, build_real (TREE_TYPE (arg0), max));
6755 case LT_EXPR:
6756 /* x < +Inf is always equal to x <= DBL_MAX. */
6757 real_maxval (&max, neg, mode);
6758 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6759 arg0, build_real (TREE_TYPE (arg0), max));
6761 case NE_EXPR:
6762 /* x != +Inf is always equal to !(x > DBL_MAX). */
6763 real_maxval (&max, neg, mode);
6764 if (! HONOR_NANS (mode))
6765 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6766 arg0, build_real (TREE_TYPE (arg0), max));
6768 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6769 arg0, build_real (TREE_TYPE (arg0), max));
6770 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6772 default:
6773 break;
6776 return NULL_TREE;
6779 /* Subroutine of fold() that optimizes comparisons of a division by
6780 a nonzero integer constant against an integer constant, i.e.
6781 X/C1 op C2.
6783 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6784 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6785 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6787 The function returns the constant folded tree if a simplification
6788 can be made, and NULL_TREE otherwise. */
6790 static tree
6791 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6793 tree prod, tmp, hi, lo;
6794 tree arg00 = TREE_OPERAND (arg0, 0);
6795 tree arg01 = TREE_OPERAND (arg0, 1);
6796 unsigned HOST_WIDE_INT lpart;
6797 HOST_WIDE_INT hpart;
6798 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6799 bool neg_overflow;
6800 int overflow;
6802 /* We have to do this the hard way to detect unsigned overflow.
6803 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6804 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6805 TREE_INT_CST_HIGH (arg01),
6806 TREE_INT_CST_LOW (arg1),
6807 TREE_INT_CST_HIGH (arg1),
6808 &lpart, &hpart, unsigned_p);
6809 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6810 -1, overflow);
6811 neg_overflow = false;
6813 if (unsigned_p)
6815 tmp = int_const_binop (MINUS_EXPR, arg01,
6816 build_int_cst (TREE_TYPE (arg01), 1), 0);
6817 lo = prod;
6819 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6820 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6821 TREE_INT_CST_HIGH (prod),
6822 TREE_INT_CST_LOW (tmp),
6823 TREE_INT_CST_HIGH (tmp),
6824 &lpart, &hpart, unsigned_p);
6825 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6826 -1, overflow | TREE_OVERFLOW (prod));
6828 else if (tree_int_cst_sgn (arg01) >= 0)
6830 tmp = int_const_binop (MINUS_EXPR, arg01,
6831 build_int_cst (TREE_TYPE (arg01), 1), 0);
6832 switch (tree_int_cst_sgn (arg1))
6834 case -1:
6835 neg_overflow = true;
6836 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6837 hi = prod;
6838 break;
6840 case 0:
6841 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6842 hi = tmp;
6843 break;
6845 case 1:
6846 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6847 lo = prod;
6848 break;
6850 default:
6851 gcc_unreachable ();
6854 else
6856 /* A negative divisor reverses the relational operators. */
6857 code = swap_tree_comparison (code);
6859 tmp = int_const_binop (PLUS_EXPR, arg01,
6860 build_int_cst (TREE_TYPE (arg01), 1), 0);
6861 switch (tree_int_cst_sgn (arg1))
6863 case -1:
6864 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6865 lo = prod;
6866 break;
6868 case 0:
6869 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6870 lo = tmp;
6871 break;
6873 case 1:
6874 neg_overflow = true;
6875 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6876 hi = prod;
6877 break;
6879 default:
6880 gcc_unreachable ();
6884 switch (code)
6886 case EQ_EXPR:
6887 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6888 return omit_one_operand (type, integer_zero_node, arg00);
6889 if (TREE_OVERFLOW (hi))
6890 return fold_build2 (GE_EXPR, type, arg00, lo);
6891 if (TREE_OVERFLOW (lo))
6892 return fold_build2 (LE_EXPR, type, arg00, hi);
6893 return build_range_check (type, arg00, 1, lo, hi);
6895 case NE_EXPR:
6896 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6897 return omit_one_operand (type, integer_one_node, arg00);
6898 if (TREE_OVERFLOW (hi))
6899 return fold_build2 (LT_EXPR, type, arg00, lo);
6900 if (TREE_OVERFLOW (lo))
6901 return fold_build2 (GT_EXPR, type, arg00, hi);
6902 return build_range_check (type, arg00, 0, lo, hi);
6904 case LT_EXPR:
6905 if (TREE_OVERFLOW (lo))
6907 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6908 return omit_one_operand (type, tmp, arg00);
6910 return fold_build2 (LT_EXPR, type, arg00, lo);
6912 case LE_EXPR:
6913 if (TREE_OVERFLOW (hi))
6915 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6916 return omit_one_operand (type, tmp, arg00);
6918 return fold_build2 (LE_EXPR, type, arg00, hi);
6920 case GT_EXPR:
6921 if (TREE_OVERFLOW (hi))
6923 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6924 return omit_one_operand (type, tmp, arg00);
6926 return fold_build2 (GT_EXPR, type, arg00, hi);
6928 case GE_EXPR:
6929 if (TREE_OVERFLOW (lo))
6931 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6932 return omit_one_operand (type, tmp, arg00);
6934 return fold_build2 (GE_EXPR, type, arg00, lo);
6936 default:
6937 break;
6940 return NULL_TREE;
6944 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6945 equality/inequality test, then return a simplified form of the test
6946 using a sign testing. Otherwise return NULL. TYPE is the desired
6947 result type. */
6949 static tree
6950 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6951 tree result_type)
6953 /* If this is testing a single bit, we can optimize the test. */
6954 if ((code == NE_EXPR || code == EQ_EXPR)
6955 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6956 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6958 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6959 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6960 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6962 if (arg00 != NULL_TREE
6963 /* This is only a win if casting to a signed type is cheap,
6964 i.e. when arg00's type is not a partial mode. */
6965 && TYPE_PRECISION (TREE_TYPE (arg00))
6966 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6968 tree stype = signed_type_for (TREE_TYPE (arg00));
6969 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6970 result_type, fold_convert (stype, arg00),
6971 build_int_cst (stype, 0));
6975 return NULL_TREE;
6978 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6979 equality/inequality test, then return a simplified form of
6980 the test using shifts and logical operations. Otherwise return
6981 NULL. TYPE is the desired result type. */
6983 tree
6984 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6985 tree result_type)
6987 /* If this is testing a single bit, we can optimize the test. */
6988 if ((code == NE_EXPR || code == EQ_EXPR)
6989 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6990 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6992 tree inner = TREE_OPERAND (arg0, 0);
6993 tree type = TREE_TYPE (arg0);
6994 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6995 enum machine_mode operand_mode = TYPE_MODE (type);
6996 int ops_unsigned;
6997 tree signed_type, unsigned_type, intermediate_type;
6998 tree tem, one;
7000 /* First, see if we can fold the single bit test into a sign-bit
7001 test. */
7002 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
7003 result_type);
7004 if (tem)
7005 return tem;
7007 /* Otherwise we have (A & C) != 0 where C is a single bit,
7008 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7009 Similarly for (A & C) == 0. */
7011 /* If INNER is a right shift of a constant and it plus BITNUM does
7012 not overflow, adjust BITNUM and INNER. */
7013 if (TREE_CODE (inner) == RSHIFT_EXPR
7014 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7015 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7016 && bitnum < TYPE_PRECISION (type)
7017 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7018 bitnum - TYPE_PRECISION (type)))
7020 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7021 inner = TREE_OPERAND (inner, 0);
7024 /* If we are going to be able to omit the AND below, we must do our
7025 operations as unsigned. If we must use the AND, we have a choice.
7026 Normally unsigned is faster, but for some machines signed is. */
7027 #ifdef LOAD_EXTEND_OP
7028 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7029 && !flag_syntax_only) ? 0 : 1;
7030 #else
7031 ops_unsigned = 1;
7032 #endif
7034 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7035 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7036 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7037 inner = fold_convert (intermediate_type, inner);
7039 if (bitnum != 0)
7040 inner = build2 (RSHIFT_EXPR, intermediate_type,
7041 inner, size_int (bitnum));
7043 one = build_int_cst (intermediate_type, 1);
7045 if (code == EQ_EXPR)
7046 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
7048 /* Put the AND last so it can combine with more things. */
7049 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7051 /* Make sure to return the proper type. */
7052 inner = fold_convert (result_type, inner);
7054 return inner;
7056 return NULL_TREE;
7059 /* Check whether we are allowed to reorder operands arg0 and arg1,
7060 such that the evaluation of arg1 occurs before arg0. */
7062 static bool
7063 reorder_operands_p (const_tree arg0, const_tree arg1)
7065 if (! flag_evaluation_order)
7066 return true;
7067 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7068 return true;
7069 return ! TREE_SIDE_EFFECTS (arg0)
7070 && ! TREE_SIDE_EFFECTS (arg1);
7073 /* Test whether it is preferable two swap two operands, ARG0 and
7074 ARG1, for example because ARG0 is an integer constant and ARG1
7075 isn't. If REORDER is true, only recommend swapping if we can
7076 evaluate the operands in reverse order. */
7078 bool
7079 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7081 STRIP_SIGN_NOPS (arg0);
7082 STRIP_SIGN_NOPS (arg1);
7084 if (TREE_CODE (arg1) == INTEGER_CST)
7085 return 0;
7086 if (TREE_CODE (arg0) == INTEGER_CST)
7087 return 1;
7089 if (TREE_CODE (arg1) == REAL_CST)
7090 return 0;
7091 if (TREE_CODE (arg0) == REAL_CST)
7092 return 1;
7094 if (TREE_CODE (arg1) == FIXED_CST)
7095 return 0;
7096 if (TREE_CODE (arg0) == FIXED_CST)
7097 return 1;
7099 if (TREE_CODE (arg1) == COMPLEX_CST)
7100 return 0;
7101 if (TREE_CODE (arg0) == COMPLEX_CST)
7102 return 1;
7104 if (TREE_CONSTANT (arg1))
7105 return 0;
7106 if (TREE_CONSTANT (arg0))
7107 return 1;
7109 if (optimize_function_for_size_p (cfun))
7110 return 0;
7112 if (reorder && flag_evaluation_order
7113 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7114 return 0;
7116 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7117 for commutative and comparison operators. Ensuring a canonical
7118 form allows the optimizers to find additional redundancies without
7119 having to explicitly check for both orderings. */
7120 if (TREE_CODE (arg0) == SSA_NAME
7121 && TREE_CODE (arg1) == SSA_NAME
7122 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7123 return 1;
7125 /* Put SSA_NAMEs last. */
7126 if (TREE_CODE (arg1) == SSA_NAME)
7127 return 0;
7128 if (TREE_CODE (arg0) == SSA_NAME)
7129 return 1;
7131 /* Put variables last. */
7132 if (DECL_P (arg1))
7133 return 0;
7134 if (DECL_P (arg0))
7135 return 1;
7137 return 0;
7140 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7141 ARG0 is extended to a wider type. */
7143 static tree
7144 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7146 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7147 tree arg1_unw;
7148 tree shorter_type, outer_type;
7149 tree min, max;
7150 bool above, below;
7152 if (arg0_unw == arg0)
7153 return NULL_TREE;
7154 shorter_type = TREE_TYPE (arg0_unw);
7156 #ifdef HAVE_canonicalize_funcptr_for_compare
7157 /* Disable this optimization if we're casting a function pointer
7158 type on targets that require function pointer canonicalization. */
7159 if (HAVE_canonicalize_funcptr_for_compare
7160 && TREE_CODE (shorter_type) == POINTER_TYPE
7161 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7162 return NULL_TREE;
7163 #endif
7165 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7166 return NULL_TREE;
7168 arg1_unw = get_unwidened (arg1, NULL_TREE);
7170 /* If possible, express the comparison in the shorter mode. */
7171 if ((code == EQ_EXPR || code == NE_EXPR
7172 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7173 && (TREE_TYPE (arg1_unw) == shorter_type
7174 || ((TYPE_PRECISION (shorter_type)
7175 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7176 && (TYPE_UNSIGNED (shorter_type)
7177 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7178 || (TREE_CODE (arg1_unw) == INTEGER_CST
7179 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7180 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7181 && int_fits_type_p (arg1_unw, shorter_type))))
7182 return fold_build2 (code, type, arg0_unw,
7183 fold_convert (shorter_type, arg1_unw));
7185 if (TREE_CODE (arg1_unw) != INTEGER_CST
7186 || TREE_CODE (shorter_type) != INTEGER_TYPE
7187 || !int_fits_type_p (arg1_unw, shorter_type))
7188 return NULL_TREE;
7190 /* If we are comparing with the integer that does not fit into the range
7191 of the shorter type, the result is known. */
7192 outer_type = TREE_TYPE (arg1_unw);
7193 min = lower_bound_in_type (outer_type, shorter_type);
7194 max = upper_bound_in_type (outer_type, shorter_type);
7196 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7197 max, arg1_unw));
7198 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7199 arg1_unw, min));
7201 switch (code)
7203 case EQ_EXPR:
7204 if (above || below)
7205 return omit_one_operand (type, integer_zero_node, arg0);
7206 break;
7208 case NE_EXPR:
7209 if (above || below)
7210 return omit_one_operand (type, integer_one_node, arg0);
7211 break;
7213 case LT_EXPR:
7214 case LE_EXPR:
7215 if (above)
7216 return omit_one_operand (type, integer_one_node, arg0);
7217 else if (below)
7218 return omit_one_operand (type, integer_zero_node, arg0);
7220 case GT_EXPR:
7221 case GE_EXPR:
7222 if (above)
7223 return omit_one_operand (type, integer_zero_node, arg0);
7224 else if (below)
7225 return omit_one_operand (type, integer_one_node, arg0);
7227 default:
7228 break;
7231 return NULL_TREE;
7234 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7235 ARG0 just the signedness is changed. */
7237 static tree
7238 fold_sign_changed_comparison (enum tree_code code, tree type,
7239 tree arg0, tree arg1)
7241 tree arg0_inner;
7242 tree inner_type, outer_type;
7244 if (!CONVERT_EXPR_P (arg0))
7245 return NULL_TREE;
7247 outer_type = TREE_TYPE (arg0);
7248 arg0_inner = TREE_OPERAND (arg0, 0);
7249 inner_type = TREE_TYPE (arg0_inner);
7251 #ifdef HAVE_canonicalize_funcptr_for_compare
7252 /* Disable this optimization if we're casting a function pointer
7253 type on targets that require function pointer canonicalization. */
7254 if (HAVE_canonicalize_funcptr_for_compare
7255 && TREE_CODE (inner_type) == POINTER_TYPE
7256 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7257 return NULL_TREE;
7258 #endif
7260 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7261 return NULL_TREE;
7263 if (TREE_CODE (arg1) != INTEGER_CST
7264 && !(CONVERT_EXPR_P (arg1)
7265 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7266 return NULL_TREE;
7268 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7269 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7270 && code != NE_EXPR
7271 && code != EQ_EXPR)
7272 return NULL_TREE;
7274 if (TREE_CODE (arg1) == INTEGER_CST)
7275 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7276 TREE_INT_CST_HIGH (arg1), 0,
7277 TREE_OVERFLOW (arg1));
7278 else
7279 arg1 = fold_convert (inner_type, arg1);
7281 return fold_build2 (code, type, arg0_inner, arg1);
7284 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7285 step of the array. Reconstructs s and delta in the case of s * delta
7286 being an integer constant (and thus already folded).
7287 ADDR is the address. MULT is the multiplicative expression.
7288 If the function succeeds, the new address expression is returned. Otherwise
7289 NULL_TREE is returned. */
7291 static tree
7292 try_move_mult_to_index (tree addr, tree op1)
7294 tree s, delta, step;
7295 tree ref = TREE_OPERAND (addr, 0), pref;
7296 tree ret, pos;
7297 tree itype;
7298 bool mdim = false;
7300 /* Strip the nops that might be added when converting op1 to sizetype. */
7301 STRIP_NOPS (op1);
7303 /* Canonicalize op1 into a possibly non-constant delta
7304 and an INTEGER_CST s. */
7305 if (TREE_CODE (op1) == MULTNV_EXPR)
7307 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7309 STRIP_NOPS (arg0);
7310 STRIP_NOPS (arg1);
7312 if (TREE_CODE (arg0) == INTEGER_CST)
7314 s = arg0;
7315 delta = arg1;
7317 else if (TREE_CODE (arg1) == INTEGER_CST)
7319 s = arg1;
7320 delta = arg0;
7322 else
7323 return NULL_TREE;
7325 else if (TREE_CODE (op1) == INTEGER_CST)
7327 delta = op1;
7328 s = NULL_TREE;
7330 else
7332 /* Simulate we are delta * 1. */
7333 delta = op1;
7334 s = integer_one_node;
7337 for (;; ref = TREE_OPERAND (ref, 0))
7339 if (TREE_CODE (ref) == ARRAY_REF)
7341 /* Remember if this was a multi-dimensional array. */
7342 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7343 mdim = true;
7345 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7346 if (! itype)
7347 continue;
7349 step = array_ref_element_size (ref);
7350 if (TREE_CODE (step) != INTEGER_CST)
7351 continue;
7353 if (s)
7355 if (! tree_int_cst_equal (step, s))
7356 continue;
7358 else
7360 /* Try if delta is a multiple of step. */
7361 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7362 if (! tmp)
7363 continue;
7364 delta = tmp;
7367 /* Only fold here if we can verify we do not overflow one
7368 dimension of a multi-dimensional array. */
7369 if (mdim)
7371 tree tmp;
7373 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7374 || !INTEGRAL_TYPE_P (itype)
7375 || !TYPE_MAX_VALUE (itype)
7376 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7377 continue;
7379 tmp = fold_binary (PLUS_EXPR, itype,
7380 fold_convert (itype,
7381 TREE_OPERAND (ref, 1)),
7382 fold_convert (itype, delta));
7383 if (!tmp
7384 || TREE_CODE (tmp) != INTEGER_CST
7385 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7386 continue;
7389 break;
7391 else
7392 mdim = false;
7394 if (!handled_component_p (ref))
7395 return NULL_TREE;
7398 /* We found the suitable array reference. So copy everything up to it,
7399 and replace the index. */
7401 pref = TREE_OPERAND (addr, 0);
7402 ret = copy_node (pref);
7403 pos = ret;
7405 while (pref != ref)
7407 pref = TREE_OPERAND (pref, 0);
7408 TREE_OPERAND (pos, 0) = copy_node (pref);
7409 pos = TREE_OPERAND (pos, 0);
7412 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7413 fold_convert (itype,
7414 TREE_OPERAND (pos, 1)),
7415 fold_convert (itype, delta));
7417 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7421 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7422 means A >= Y && A != MAX, but in this case we know that
7423 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7425 static tree
7426 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7428 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7430 if (TREE_CODE (bound) == LT_EXPR)
7431 a = TREE_OPERAND (bound, 0);
7432 else if (TREE_CODE (bound) == GT_EXPR)
7433 a = TREE_OPERAND (bound, 1);
7434 else
7435 return NULL_TREE;
7437 typea = TREE_TYPE (a);
7438 if (!INTEGRAL_TYPE_P (typea)
7439 && !POINTER_TYPE_P (typea))
7440 return NULL_TREE;
7442 if (TREE_CODE (ineq) == LT_EXPR)
7444 a1 = TREE_OPERAND (ineq, 1);
7445 y = TREE_OPERAND (ineq, 0);
7447 else if (TREE_CODE (ineq) == GT_EXPR)
7449 a1 = TREE_OPERAND (ineq, 0);
7450 y = TREE_OPERAND (ineq, 1);
7452 else
7453 return NULL_TREE;
7455 if (TREE_TYPE (a1) != typea)
7456 return NULL_TREE;
7458 if (POINTER_TYPE_P (typea))
7460 /* Convert the pointer types into integer before taking the difference. */
7461 tree ta = fold_convert (ssizetype, a);
7462 tree ta1 = fold_convert (ssizetype, a1);
7463 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7465 else
7466 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7468 if (!diff || !integer_onep (diff))
7469 return NULL_TREE;
7471 return fold_build2 (GE_EXPR, type, a, y);
7474 /* Fold a sum or difference of at least one multiplication.
7475 Returns the folded tree or NULL if no simplification could be made. */
7477 static tree
7478 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7480 tree arg00, arg01, arg10, arg11;
7481 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7483 /* (A * C) +- (B * C) -> (A+-B) * C.
7484 (A * C) +- A -> A * (C+-1).
7485 We are most concerned about the case where C is a constant,
7486 but other combinations show up during loop reduction. Since
7487 it is not difficult, try all four possibilities. */
7489 if (MULT_EXPR_P (arg0))
7491 arg00 = TREE_OPERAND (arg0, 0);
7492 arg01 = TREE_OPERAND (arg0, 1);
7494 else if (TREE_CODE (arg0) == INTEGER_CST)
7496 arg00 = build_one_cst (type);
7497 arg01 = arg0;
7499 else
7501 /* We cannot generate constant 1 for fract. */
7502 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7503 return NULL_TREE;
7504 arg00 = arg0;
7505 arg01 = build_one_cst (type);
7507 if (MULT_EXPR_P (arg1))
7509 arg10 = TREE_OPERAND (arg1, 0);
7510 arg11 = TREE_OPERAND (arg1, 1);
7512 else if (TREE_CODE (arg1) == INTEGER_CST)
7514 arg10 = build_one_cst (type);
7515 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7516 the purpose of this canonicalization. */
7517 if (TREE_INT_CST_HIGH (arg1) == -1
7518 && negate_expr_p (arg1)
7519 && PLUS_EXPR_CODE_P (code))
7521 arg11 = negate_expr (arg1);
7522 code = MINUS_EXPR;
7524 else
7525 arg11 = arg1;
7527 else
7529 /* We cannot generate constant 1 for fract. */
7530 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7531 return NULL_TREE;
7532 arg10 = arg1;
7533 arg11 = build_one_cst (type);
7535 same = NULL_TREE;
7537 if (operand_equal_p (arg01, arg11, 0))
7538 same = arg01, alt0 = arg00, alt1 = arg10;
7539 else if (operand_equal_p (arg00, arg10, 0))
7540 same = arg00, alt0 = arg01, alt1 = arg11;
7541 else if (operand_equal_p (arg00, arg11, 0))
7542 same = arg00, alt0 = arg01, alt1 = arg10;
7543 else if (operand_equal_p (arg01, arg10, 0))
7544 same = arg01, alt0 = arg00, alt1 = arg11;
7546 /* No identical multiplicands; see if we can find a common
7547 power-of-two factor in non-power-of-two multiplies. This
7548 can help in multi-dimensional array access. */
7549 else if (host_integerp (arg01, 0)
7550 && host_integerp (arg11, 0))
7552 HOST_WIDE_INT int01, int11, tmp;
7553 bool swap = false;
7554 tree maybe_same;
7555 int01 = TREE_INT_CST_LOW (arg01);
7556 int11 = TREE_INT_CST_LOW (arg11);
7558 /* Move min of absolute values to int11. */
7559 if ((int01 >= 0 ? int01 : -int01)
7560 < (int11 >= 0 ? int11 : -int11))
7562 tmp = int01, int01 = int11, int11 = tmp;
7563 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7564 maybe_same = arg01;
7565 swap = true;
7567 else
7568 maybe_same = arg11;
7570 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7571 /* The remainder should not be a constant, otherwise we
7572 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7573 increased the number of multiplications necessary. */
7574 && TREE_CODE (arg10) != INTEGER_CST)
7576 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7577 build_int_cst (TREE_TYPE (arg00),
7578 int01 / int11));
7579 alt1 = arg10;
7580 same = maybe_same;
7581 if (swap)
7582 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7586 if (same)
7587 return fold_build2 (MULT_EXPR, type,
7588 fold_build2 (strip_nv (code), type,
7589 fold_convert (type, alt0),
7590 fold_convert (type, alt1)),
7591 fold_convert (type, same));
7593 return NULL_TREE;
7596 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7597 specified by EXPR into the buffer PTR of length LEN bytes.
7598 Return the number of bytes placed in the buffer, or zero
7599 upon failure. */
7601 static int
7602 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7604 tree type = TREE_TYPE (expr);
7605 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7606 int byte, offset, word, words;
7607 unsigned char value;
7609 if (total_bytes > len)
7610 return 0;
7611 words = total_bytes / UNITS_PER_WORD;
7613 for (byte = 0; byte < total_bytes; byte++)
7615 int bitpos = byte * BITS_PER_UNIT;
7616 if (bitpos < HOST_BITS_PER_WIDE_INT)
7617 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7618 else
7619 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7620 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7622 if (total_bytes > UNITS_PER_WORD)
7624 word = byte / UNITS_PER_WORD;
7625 if (WORDS_BIG_ENDIAN)
7626 word = (words - 1) - word;
7627 offset = word * UNITS_PER_WORD;
7628 if (BYTES_BIG_ENDIAN)
7629 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7630 else
7631 offset += byte % UNITS_PER_WORD;
7633 else
7634 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7635 ptr[offset] = value;
7637 return total_bytes;
7641 /* Subroutine of native_encode_expr. Encode the REAL_CST
7642 specified by EXPR into the buffer PTR of length LEN bytes.
7643 Return the number of bytes placed in the buffer, or zero
7644 upon failure. */
7646 static int
7647 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7649 tree type = TREE_TYPE (expr);
7650 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7651 int byte, offset, word, words, bitpos;
7652 unsigned char value;
7654 /* There are always 32 bits in each long, no matter the size of
7655 the hosts long. We handle floating point representations with
7656 up to 192 bits. */
7657 long tmp[6];
7659 if (total_bytes > len)
7660 return 0;
7661 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7663 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7665 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7666 bitpos += BITS_PER_UNIT)
7668 byte = (bitpos / BITS_PER_UNIT) & 3;
7669 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7671 if (UNITS_PER_WORD < 4)
7673 word = byte / UNITS_PER_WORD;
7674 if (WORDS_BIG_ENDIAN)
7675 word = (words - 1) - word;
7676 offset = word * UNITS_PER_WORD;
7677 if (BYTES_BIG_ENDIAN)
7678 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7679 else
7680 offset += byte % UNITS_PER_WORD;
7682 else
7683 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7684 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7686 return total_bytes;
7689 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7690 specified by EXPR into the buffer PTR of length LEN bytes.
7691 Return the number of bytes placed in the buffer, or zero
7692 upon failure. */
7694 static int
7695 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7697 int rsize, isize;
7698 tree part;
7700 part = TREE_REALPART (expr);
7701 rsize = native_encode_expr (part, ptr, len);
7702 if (rsize == 0)
7703 return 0;
7704 part = TREE_IMAGPART (expr);
7705 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7706 if (isize != rsize)
7707 return 0;
7708 return rsize + isize;
7712 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7713 specified by EXPR into the buffer PTR of length LEN bytes.
7714 Return the number of bytes placed in the buffer, or zero
7715 upon failure. */
7717 static int
7718 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7720 int i, size, offset, count;
7721 tree itype, elem, elements;
7723 offset = 0;
7724 elements = TREE_VECTOR_CST_ELTS (expr);
7725 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7726 itype = TREE_TYPE (TREE_TYPE (expr));
7727 size = GET_MODE_SIZE (TYPE_MODE (itype));
7728 for (i = 0; i < count; i++)
7730 if (elements)
7732 elem = TREE_VALUE (elements);
7733 elements = TREE_CHAIN (elements);
7735 else
7736 elem = NULL_TREE;
7738 if (elem)
7740 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7741 return 0;
7743 else
7745 if (offset + size > len)
7746 return 0;
7747 memset (ptr+offset, 0, size);
7749 offset += size;
7751 return offset;
7755 /* Subroutine of native_encode_expr. Encode the STRING_CST
7756 specified by EXPR into the buffer PTR of length LEN bytes.
7757 Return the number of bytes placed in the buffer, or zero
7758 upon failure. */
7760 static int
7761 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7763 tree type = TREE_TYPE (expr);
7764 HOST_WIDE_INT total_bytes;
7766 if (TREE_CODE (type) != ARRAY_TYPE
7767 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7768 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7769 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7770 return 0;
7771 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7772 if (total_bytes > len)
7773 return 0;
7774 if (TREE_STRING_LENGTH (expr) < total_bytes)
7776 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7777 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7778 total_bytes - TREE_STRING_LENGTH (expr));
7780 else
7781 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7782 return total_bytes;
7786 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7787 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7788 buffer PTR of length LEN bytes. Return the number of bytes
7789 placed in the buffer, or zero upon failure. */
7792 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7794 switch (TREE_CODE (expr))
7796 case INTEGER_CST:
7797 return native_encode_int (expr, ptr, len);
7799 case REAL_CST:
7800 return native_encode_real (expr, ptr, len);
7802 case COMPLEX_CST:
7803 return native_encode_complex (expr, ptr, len);
7805 case VECTOR_CST:
7806 return native_encode_vector (expr, ptr, len);
7808 case STRING_CST:
7809 return native_encode_string (expr, ptr, len);
7811 default:
7812 return 0;
7817 /* Subroutine of native_interpret_expr. Interpret the contents of
7818 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7819 If the buffer cannot be interpreted, return NULL_TREE. */
7821 static tree
7822 native_interpret_int (tree type, const unsigned char *ptr, int len)
7824 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7825 int byte, offset, word, words;
7826 unsigned char value;
7827 unsigned int HOST_WIDE_INT lo = 0;
7828 HOST_WIDE_INT hi = 0;
7830 if (total_bytes > len)
7831 return NULL_TREE;
7832 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7833 return NULL_TREE;
7834 words = total_bytes / UNITS_PER_WORD;
7836 for (byte = 0; byte < total_bytes; byte++)
7838 int bitpos = byte * BITS_PER_UNIT;
7839 if (total_bytes > UNITS_PER_WORD)
7841 word = byte / UNITS_PER_WORD;
7842 if (WORDS_BIG_ENDIAN)
7843 word = (words - 1) - word;
7844 offset = word * UNITS_PER_WORD;
7845 if (BYTES_BIG_ENDIAN)
7846 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7847 else
7848 offset += byte % UNITS_PER_WORD;
7850 else
7851 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7852 value = ptr[offset];
7854 if (bitpos < HOST_BITS_PER_WIDE_INT)
7855 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7856 else
7857 hi |= (unsigned HOST_WIDE_INT) value
7858 << (bitpos - HOST_BITS_PER_WIDE_INT);
7861 return build_int_cst_wide_type (type, lo, hi);
7865 /* Subroutine of native_interpret_expr. Interpret the contents of
7866 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7867 If the buffer cannot be interpreted, return NULL_TREE. */
7869 static tree
7870 native_interpret_real (tree type, const unsigned char *ptr, int len)
7872 enum machine_mode mode = TYPE_MODE (type);
7873 int total_bytes = GET_MODE_SIZE (mode);
7874 int byte, offset, word, words, bitpos;
7875 unsigned char value;
7876 /* There are always 32 bits in each long, no matter the size of
7877 the hosts long. We handle floating point representations with
7878 up to 192 bits. */
7879 REAL_VALUE_TYPE r;
7880 long tmp[6];
7882 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7883 if (total_bytes > len || total_bytes > 24)
7884 return NULL_TREE;
7885 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7887 memset (tmp, 0, sizeof (tmp));
7888 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7889 bitpos += BITS_PER_UNIT)
7891 byte = (bitpos / BITS_PER_UNIT) & 3;
7892 if (UNITS_PER_WORD < 4)
7894 word = byte / UNITS_PER_WORD;
7895 if (WORDS_BIG_ENDIAN)
7896 word = (words - 1) - word;
7897 offset = word * UNITS_PER_WORD;
7898 if (BYTES_BIG_ENDIAN)
7899 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7900 else
7901 offset += byte % UNITS_PER_WORD;
7903 else
7904 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7905 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7907 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7910 real_from_target (&r, tmp, mode);
7911 return build_real (type, r);
7915 /* Subroutine of native_interpret_expr. Interpret the contents of
7916 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7917 If the buffer cannot be interpreted, return NULL_TREE. */
7919 static tree
7920 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7922 tree etype, rpart, ipart;
7923 int size;
7925 etype = TREE_TYPE (type);
7926 size = GET_MODE_SIZE (TYPE_MODE (etype));
7927 if (size * 2 > len)
7928 return NULL_TREE;
7929 rpart = native_interpret_expr (etype, ptr, size);
7930 if (!rpart)
7931 return NULL_TREE;
7932 ipart = native_interpret_expr (etype, ptr+size, size);
7933 if (!ipart)
7934 return NULL_TREE;
7935 return build_complex (type, rpart, ipart);
7939 /* Subroutine of native_interpret_expr. Interpret the contents of
7940 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7941 If the buffer cannot be interpreted, return NULL_TREE. */
7943 static tree
7944 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7946 tree etype, elem, elements;
7947 int i, size, count;
7949 etype = TREE_TYPE (type);
7950 size = GET_MODE_SIZE (TYPE_MODE (etype));
7951 count = TYPE_VECTOR_SUBPARTS (type);
7952 if (size * count > len)
7953 return NULL_TREE;
7955 elements = NULL_TREE;
7956 for (i = count - 1; i >= 0; i--)
7958 elem = native_interpret_expr (etype, ptr+(i*size), size);
7959 if (!elem)
7960 return NULL_TREE;
7961 elements = tree_cons (NULL_TREE, elem, elements);
7963 return build_vector (type, elements);
7967 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7968 the buffer PTR of length LEN as a constant of type TYPE. For
7969 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7970 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7971 return NULL_TREE. */
7973 tree
7974 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7976 switch (TREE_CODE (type))
7978 case INTEGER_TYPE:
7979 case ENUMERAL_TYPE:
7980 case BOOLEAN_TYPE:
7981 return native_interpret_int (type, ptr, len);
7983 case REAL_TYPE:
7984 return native_interpret_real (type, ptr, len);
7986 case COMPLEX_TYPE:
7987 return native_interpret_complex (type, ptr, len);
7989 case VECTOR_TYPE:
7990 return native_interpret_vector (type, ptr, len);
7992 default:
7993 return NULL_TREE;
7998 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7999 TYPE at compile-time. If we're unable to perform the conversion
8000 return NULL_TREE. */
8002 static tree
8003 fold_view_convert_expr (tree type, tree expr)
8005 /* We support up to 512-bit values (for V8DFmode). */
8006 unsigned char buffer[64];
8007 int len;
8009 /* Check that the host and target are sane. */
8010 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8011 return NULL_TREE;
8013 len = native_encode_expr (expr, buffer, sizeof (buffer));
8014 if (len == 0)
8015 return NULL_TREE;
8017 return native_interpret_expr (type, buffer, len);
8020 /* Build an expression for the address of T. Folds away INDIRECT_REF
8021 to avoid confusing the gimplify process. */
8023 tree
8024 build_fold_addr_expr_with_type (tree t, tree ptrtype)
8026 /* The size of the object is not relevant when talking about its address. */
8027 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8028 t = TREE_OPERAND (t, 0);
8030 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
8031 if (TREE_CODE (t) == INDIRECT_REF
8032 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
8034 t = TREE_OPERAND (t, 0);
8036 if (TREE_TYPE (t) != ptrtype)
8037 t = build1 (NOP_EXPR, ptrtype, t);
8039 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8041 t = build_fold_addr_expr (TREE_OPERAND (t, 0));
8043 if (TREE_TYPE (t) != ptrtype)
8044 t = fold_convert (ptrtype, t);
8046 else
8047 t = build1 (ADDR_EXPR, ptrtype, t);
8049 return t;
8052 /* Build an expression for the address of T. */
8054 tree
8055 build_fold_addr_expr (tree t)
8057 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8059 return build_fold_addr_expr_with_type (t, ptrtype);
8062 /* Fold a unary expression of code CODE and type TYPE with operand
8063 OP0. Return the folded expression if folding is successful.
8064 Otherwise, return NULL_TREE. */
8066 tree
8067 fold_unary (enum tree_code code, tree type, tree op0)
8069 tree tem;
8070 tree arg0;
8071 enum tree_code_class kind = TREE_CODE_CLASS (code);
8073 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8074 && TREE_CODE_LENGTH (code) == 1);
8076 arg0 = op0;
8077 if (arg0)
8079 if (CONVERT_EXPR_CODE_P (code)
8080 || code == FLOAT_EXPR || code == ABS_EXPR)
8082 /* Don't use STRIP_NOPS, because signedness of argument type
8083 matters. */
8084 STRIP_SIGN_NOPS (arg0);
8086 else
8088 /* Strip any conversions that don't change the mode. This
8089 is safe for every expression, except for a comparison
8090 expression because its signedness is derived from its
8091 operands.
8093 Note that this is done as an internal manipulation within
8094 the constant folder, in order to find the simplest
8095 representation of the arguments so that their form can be
8096 studied. In any cases, the appropriate type conversions
8097 should be put back in the tree that will get out of the
8098 constant folder. */
8099 STRIP_NOPS (arg0);
8103 if (TREE_CODE_CLASS (code) == tcc_unary)
8105 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8106 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8107 fold_build1 (code, type,
8108 fold_convert (TREE_TYPE (op0),
8109 TREE_OPERAND (arg0, 1))));
8110 else if (TREE_CODE (arg0) == COND_EXPR)
8112 tree arg01 = TREE_OPERAND (arg0, 1);
8113 tree arg02 = TREE_OPERAND (arg0, 2);
8114 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8115 arg01 = fold_build1 (code, type,
8116 fold_convert (TREE_TYPE (op0), arg01));
8117 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8118 arg02 = fold_build1 (code, type,
8119 fold_convert (TREE_TYPE (op0), arg02));
8120 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8121 arg01, arg02);
8123 /* If this was a conversion, and all we did was to move into
8124 inside the COND_EXPR, bring it back out. But leave it if
8125 it is a conversion from integer to integer and the
8126 result precision is no wider than a word since such a
8127 conversion is cheap and may be optimized away by combine,
8128 while it couldn't if it were outside the COND_EXPR. Then return
8129 so we don't get into an infinite recursion loop taking the
8130 conversion out and then back in. */
8132 if ((CONVERT_EXPR_CODE_P (code)
8133 || code == NON_LVALUE_EXPR)
8134 && TREE_CODE (tem) == COND_EXPR
8135 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8136 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8137 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8138 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8139 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8140 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8141 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8142 && (INTEGRAL_TYPE_P
8143 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8144 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8145 || flag_syntax_only))
8146 tem = build1 (code, type,
8147 build3 (COND_EXPR,
8148 TREE_TYPE (TREE_OPERAND
8149 (TREE_OPERAND (tem, 1), 0)),
8150 TREE_OPERAND (tem, 0),
8151 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8152 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8153 return tem;
8155 else if (COMPARISON_CLASS_P (arg0))
8157 if (TREE_CODE (type) == BOOLEAN_TYPE)
8159 arg0 = copy_node (arg0);
8160 TREE_TYPE (arg0) = type;
8161 return arg0;
8163 else if (TREE_CODE (type) != INTEGER_TYPE)
8164 return fold_build3 (COND_EXPR, type, arg0,
8165 fold_build1 (code, type,
8166 integer_one_node),
8167 fold_build1 (code, type,
8168 integer_zero_node));
8172 switch (code)
8174 case PAREN_EXPR:
8175 /* Re-association barriers around constants and other re-association
8176 barriers can be removed. */
8177 if (CONSTANT_CLASS_P (op0)
8178 || TREE_CODE (op0) == PAREN_EXPR)
8179 return fold_convert (type, op0);
8180 return NULL_TREE;
8182 CASE_CONVERT:
8183 case FLOAT_EXPR:
8184 case FIX_TRUNC_EXPR:
8185 if (TREE_TYPE (op0) == type)
8186 return op0;
8188 /* If we have (type) (a CMP b) and type is an integral type, return
8189 new expression involving the new type. */
8190 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8191 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8192 TREE_OPERAND (op0, 1));
8194 /* Handle cases of two conversions in a row. */
8195 if (CONVERT_EXPR_P (op0))
8197 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8198 tree inter_type = TREE_TYPE (op0);
8199 int inside_int = INTEGRAL_TYPE_P (inside_type);
8200 int inside_ptr = POINTER_TYPE_P (inside_type);
8201 int inside_float = FLOAT_TYPE_P (inside_type);
8202 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8203 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8204 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8205 int inter_int = INTEGRAL_TYPE_P (inter_type);
8206 int inter_ptr = POINTER_TYPE_P (inter_type);
8207 int inter_float = FLOAT_TYPE_P (inter_type);
8208 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8209 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8210 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8211 int final_int = INTEGRAL_TYPE_P (type);
8212 int final_ptr = POINTER_TYPE_P (type);
8213 int final_float = FLOAT_TYPE_P (type);
8214 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8215 unsigned int final_prec = TYPE_PRECISION (type);
8216 int final_unsignedp = TYPE_UNSIGNED (type);
8218 /* In addition to the cases of two conversions in a row
8219 handled below, if we are converting something to its own
8220 type via an object of identical or wider precision, neither
8221 conversion is needed. */
8222 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8223 && (((inter_int || inter_ptr) && final_int)
8224 || (inter_float && final_float))
8225 && inter_prec >= final_prec)
8226 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8228 /* Likewise, if the intermediate and initial types are either both
8229 float or both integer, we don't need the middle conversion if the
8230 former is wider than the latter and doesn't change the signedness
8231 (for integers). Avoid this if the final type is a pointer since
8232 then we sometimes need the middle conversion. Likewise if the
8233 final type has a precision not equal to the size of its mode. */
8234 if (((inter_int && inside_int)
8235 || (inter_float && inside_float)
8236 || (inter_vec && inside_vec))
8237 && inter_prec >= inside_prec
8238 && (inter_float || inter_vec
8239 || inter_unsignedp == inside_unsignedp)
8240 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8241 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8242 && ! final_ptr
8243 && (! final_vec || inter_prec == inside_prec))
8244 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8246 /* If we have a sign-extension of a zero-extended value, we can
8247 replace that by a single zero-extension. */
8248 if (inside_int && inter_int && final_int
8249 && inside_prec < inter_prec && inter_prec < final_prec
8250 && inside_unsignedp && !inter_unsignedp)
8251 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8253 /* Two conversions in a row are not needed unless:
8254 - some conversion is floating-point (overstrict for now), or
8255 - some conversion is a vector (overstrict for now), or
8256 - the intermediate type is narrower than both initial and
8257 final, or
8258 - the intermediate type and innermost type differ in signedness,
8259 and the outermost type is wider than the intermediate, or
8260 - the initial type is a pointer type and the precisions of the
8261 intermediate and final types differ, or
8262 - the final type is a pointer type and the precisions of the
8263 initial and intermediate types differ. */
8264 if (! inside_float && ! inter_float && ! final_float
8265 && ! inside_vec && ! inter_vec && ! final_vec
8266 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8267 && ! (inside_int && inter_int
8268 && inter_unsignedp != inside_unsignedp
8269 && inter_prec < final_prec)
8270 && ((inter_unsignedp && inter_prec > inside_prec)
8271 == (final_unsignedp && final_prec > inter_prec))
8272 && ! (inside_ptr && inter_prec != final_prec)
8273 && ! (final_ptr && inside_prec != inter_prec)
8274 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8275 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8276 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8279 /* Handle (T *)&A.B.C for A being of type T and B and C
8280 living at offset zero. This occurs frequently in
8281 C++ upcasting and then accessing the base. */
8282 if (TREE_CODE (op0) == ADDR_EXPR
8283 && POINTER_TYPE_P (type)
8284 && handled_component_p (TREE_OPERAND (op0, 0)))
8286 HOST_WIDE_INT bitsize, bitpos;
8287 tree offset;
8288 enum machine_mode mode;
8289 int unsignedp, volatilep;
8290 tree base = TREE_OPERAND (op0, 0);
8291 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8292 &mode, &unsignedp, &volatilep, false);
8293 /* If the reference was to a (constant) zero offset, we can use
8294 the address of the base if it has the same base type
8295 as the result type. */
8296 if (! offset && bitpos == 0
8297 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8298 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8299 return fold_convert (type, build_fold_addr_expr (base));
8302 if (TREE_CODE (op0) == MODIFY_EXPR
8303 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8304 /* Detect assigning a bitfield. */
8305 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8306 && DECL_BIT_FIELD
8307 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8309 /* Don't leave an assignment inside a conversion
8310 unless assigning a bitfield. */
8311 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8312 /* First do the assignment, then return converted constant. */
8313 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8314 TREE_NO_WARNING (tem) = 1;
8315 TREE_USED (tem) = 1;
8316 return tem;
8319 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8320 constants (if x has signed type, the sign bit cannot be set
8321 in c). This folds extension into the BIT_AND_EXPR.
8322 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8323 very likely don't have maximal range for their precision and this
8324 transformation effectively doesn't preserve non-maximal ranges. */
8325 if (TREE_CODE (type) == INTEGER_TYPE
8326 && TREE_CODE (op0) == BIT_AND_EXPR
8327 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8329 tree and_expr = op0;
8330 tree and0 = TREE_OPERAND (and_expr, 0);
8331 tree and1 = TREE_OPERAND (and_expr, 1);
8332 int change = 0;
8334 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8335 || (TYPE_PRECISION (type)
8336 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8337 change = 1;
8338 else if (TYPE_PRECISION (TREE_TYPE (and1))
8339 <= HOST_BITS_PER_WIDE_INT
8340 && host_integerp (and1, 1))
8342 unsigned HOST_WIDE_INT cst;
8344 cst = tree_low_cst (and1, 1);
8345 cst &= (HOST_WIDE_INT) -1
8346 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8347 change = (cst == 0);
8348 #ifdef LOAD_EXTEND_OP
8349 if (change
8350 && !flag_syntax_only
8351 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8352 == ZERO_EXTEND))
8354 tree uns = unsigned_type_for (TREE_TYPE (and0));
8355 and0 = fold_convert (uns, and0);
8356 and1 = fold_convert (uns, and1);
8358 #endif
8360 if (change)
8362 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8363 TREE_INT_CST_HIGH (and1), 0,
8364 TREE_OVERFLOW (and1));
8365 return fold_build2 (BIT_AND_EXPR, type,
8366 fold_convert (type, and0), tem);
8370 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type. */
8371 if (POINTER_TYPE_P (type)
8372 && POINTER_PLUS_EXPR_P (arg0))
8374 tree arg00 = TREE_OPERAND (arg0, 0);
8375 tree arg01 = TREE_OPERAND (arg0, 1);
8377 return fold_build2 (TREE_CODE (arg0), type,
8378 fold_convert (type, arg00), arg01);
8381 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8382 of the same precision, and X is an integer type not narrower than
8383 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8384 if (INTEGRAL_TYPE_P (type)
8385 && TREE_CODE (op0) == BIT_NOT_EXPR
8386 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8387 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8388 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8390 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8391 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8392 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8393 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8396 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8397 type of X and Y (integer types only). */
8398 if (INTEGRAL_TYPE_P (type)
8399 && MULT_EXPR_P (op0)
8400 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8401 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8403 /* Be careful to fall back to MULT_EXPR from MULTNV_EXPR. */
8404 return fold_build2 (MULT_EXPR, type,
8405 fold_convert (type,
8406 TREE_OPERAND (op0, 0)),
8407 fold_convert (type,
8408 TREE_OPERAND (op0, 1)));
8411 tem = fold_convert_const (code, type, op0);
8412 return tem ? tem : NULL_TREE;
8414 case FIXED_CONVERT_EXPR:
8415 tem = fold_convert_const (code, type, arg0);
8416 return tem ? tem : NULL_TREE;
8418 case VIEW_CONVERT_EXPR:
8419 if (TREE_TYPE (op0) == type)
8420 return op0;
8421 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8422 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8424 /* For integral conversions with the same precision or pointer
8425 conversions use a NOP_EXPR instead. */
8426 if ((INTEGRAL_TYPE_P (type)
8427 || POINTER_TYPE_P (type))
8428 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8429 || POINTER_TYPE_P (TREE_TYPE (op0)))
8430 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8431 return fold_convert (type, op0);
8433 /* Strip inner integral conversions that do not change the precision. */
8434 if (CONVERT_EXPR_P (op0)
8435 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8436 || POINTER_TYPE_P (TREE_TYPE (op0)))
8437 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8438 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8439 && (TYPE_PRECISION (TREE_TYPE (op0))
8440 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8441 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8443 return fold_view_convert_expr (type, op0);
8445 case NEGATE_EXPR:
8446 case NEGATENV_EXPR:
8447 tem = fold_negate_expr (arg0);
8448 if (tem)
8449 return fold_convert (type, tem);
8450 return NULL_TREE;
8452 case ABS_EXPR:
8453 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8454 return fold_abs_const (arg0, type);
8455 else if (NEGATE_EXPR_P (arg0))
8456 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8457 /* Convert fabs((double)float) into (double)fabsf(float). */
8458 else if (TREE_CODE (arg0) == NOP_EXPR
8459 && TREE_CODE (type) == REAL_TYPE)
8461 tree targ0 = strip_float_extensions (arg0);
8462 if (targ0 != arg0)
8463 return fold_convert (type, fold_build1 (ABS_EXPR,
8464 TREE_TYPE (targ0),
8465 targ0));
8467 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8468 else if (TREE_CODE (arg0) == ABS_EXPR)
8469 return arg0;
8470 else if (tree_expr_nonnegative_p (arg0))
8471 return arg0;
8473 /* Strip sign ops from argument. */
8474 if (TREE_CODE (type) == REAL_TYPE)
8476 tem = fold_strip_sign_ops (arg0);
8477 if (tem)
8478 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8480 return NULL_TREE;
8482 case CONJ_EXPR:
8483 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8484 return fold_convert (type, arg0);
8485 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8487 tree itype = TREE_TYPE (type);
8488 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8489 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8490 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8492 if (TREE_CODE (arg0) == COMPLEX_CST)
8494 tree itype = TREE_TYPE (type);
8495 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8496 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8497 return build_complex (type, rpart, negate_expr (ipart));
8499 if (TREE_CODE (arg0) == CONJ_EXPR)
8500 return fold_convert (type, TREE_OPERAND (arg0, 0));
8501 return NULL_TREE;
8503 case BIT_NOT_EXPR:
8504 if (TREE_CODE (arg0) == INTEGER_CST)
8505 return fold_not_const (arg0, type);
8506 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8507 return fold_convert (type, TREE_OPERAND (arg0, 0));
8508 /* Convert ~ (-A) to A - 1. */
8509 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8510 return fold_build2 (MINUS_EXPR, type,
8511 fold_convert (type, TREE_OPERAND (arg0, 0)),
8512 build_int_cst (type, 1));
8513 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8514 else if (INTEGRAL_TYPE_P (type)
8515 && ((TREE_CODE (arg0) == MINUS_EXPR
8516 && integer_onep (TREE_OPERAND (arg0, 1)))
8517 || (TREE_CODE (arg0) == PLUS_EXPR
8518 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8519 return fold_build1 (NEGATE_EXPR, type,
8520 fold_convert (type, TREE_OPERAND (arg0, 0)));
8521 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8522 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8523 && (tem = fold_unary (BIT_NOT_EXPR, type,
8524 fold_convert (type,
8525 TREE_OPERAND (arg0, 0)))))
8526 return fold_build2 (BIT_XOR_EXPR, type, tem,
8527 fold_convert (type, TREE_OPERAND (arg0, 1)));
8528 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8529 && (tem = fold_unary (BIT_NOT_EXPR, type,
8530 fold_convert (type,
8531 TREE_OPERAND (arg0, 1)))))
8532 return fold_build2 (BIT_XOR_EXPR, type,
8533 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8534 /* Perform BIT_NOT_EXPR on each element individually. */
8535 else if (TREE_CODE (arg0) == VECTOR_CST)
8537 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8538 int count = TYPE_VECTOR_SUBPARTS (type), i;
8540 for (i = 0; i < count; i++)
8542 if (elements)
8544 elem = TREE_VALUE (elements);
8545 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8546 if (elem == NULL_TREE)
8547 break;
8548 elements = TREE_CHAIN (elements);
8550 else
8551 elem = build_int_cst (TREE_TYPE (type), -1);
8552 list = tree_cons (NULL_TREE, elem, list);
8554 if (i == count)
8555 return build_vector (type, nreverse (list));
8558 return NULL_TREE;
8560 case TRUTH_NOT_EXPR:
8561 /* The argument to invert_truthvalue must have Boolean type. */
8562 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8563 arg0 = fold_convert (boolean_type_node, arg0);
8565 /* Note that the operand of this must be an int
8566 and its values must be 0 or 1.
8567 ("true" is a fixed value perhaps depending on the language,
8568 but we don't handle values other than 1 correctly yet.) */
8569 tem = fold_truth_not_expr (arg0);
8570 if (!tem)
8571 return NULL_TREE;
8572 return fold_convert (type, tem);
8574 case REALPART_EXPR:
8575 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8576 return fold_convert (type, arg0);
8577 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8578 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8579 TREE_OPERAND (arg0, 1));
8580 if (TREE_CODE (arg0) == COMPLEX_CST)
8581 return fold_convert (type, TREE_REALPART (arg0));
8582 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8584 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8585 tem = fold_build2 (TREE_CODE (arg0), itype,
8586 fold_build1 (REALPART_EXPR, itype,
8587 TREE_OPERAND (arg0, 0)),
8588 fold_build1 (REALPART_EXPR, itype,
8589 TREE_OPERAND (arg0, 1)));
8590 return fold_convert (type, tem);
8592 if (TREE_CODE (arg0) == CONJ_EXPR)
8594 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8595 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8596 return fold_convert (type, tem);
8598 if (TREE_CODE (arg0) == CALL_EXPR)
8600 tree fn = get_callee_fndecl (arg0);
8601 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8602 switch (DECL_FUNCTION_CODE (fn))
8604 CASE_FLT_FN (BUILT_IN_CEXPI):
8605 fn = mathfn_built_in (type, BUILT_IN_COS);
8606 if (fn)
8607 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8608 break;
8610 default:
8611 break;
8614 return NULL_TREE;
8616 case IMAGPART_EXPR:
8617 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8618 return fold_convert (type, integer_zero_node);
8619 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8620 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8621 TREE_OPERAND (arg0, 0));
8622 if (TREE_CODE (arg0) == COMPLEX_CST)
8623 return fold_convert (type, TREE_IMAGPART (arg0));
8624 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8626 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8627 tem = fold_build2 (TREE_CODE (arg0), itype,
8628 fold_build1 (IMAGPART_EXPR, itype,
8629 TREE_OPERAND (arg0, 0)),
8630 fold_build1 (IMAGPART_EXPR, itype,
8631 TREE_OPERAND (arg0, 1)));
8632 return fold_convert (type, tem);
8634 if (TREE_CODE (arg0) == CONJ_EXPR)
8636 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8637 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8638 return fold_convert (type, negate_expr (tem));
8640 if (TREE_CODE (arg0) == CALL_EXPR)
8642 tree fn = get_callee_fndecl (arg0);
8643 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8644 switch (DECL_FUNCTION_CODE (fn))
8646 CASE_FLT_FN (BUILT_IN_CEXPI):
8647 fn = mathfn_built_in (type, BUILT_IN_SIN);
8648 if (fn)
8649 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8650 break;
8652 default:
8653 break;
8656 return NULL_TREE;
8658 default:
8659 return NULL_TREE;
8660 } /* switch (code) */
8664 /* If the operation was a conversion do _not_ mark a resulting constant
8665 with TREE_OVERFLOW if the original constant was not. These conversions
8666 have implementation defined behavior and retaining the TREE_OVERFLOW
8667 flag here would confuse later passes such as VRP. */
8668 tree
8669 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8671 tree res = fold_unary (code, type, op0);
8672 if (res
8673 && TREE_CODE (res) == INTEGER_CST
8674 && TREE_CODE (op0) == INTEGER_CST
8675 && CONVERT_EXPR_CODE_P (code))
8676 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8678 return res;
8681 /* Fold a binary expression of code CODE and type TYPE with operands
8682 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8683 Return the folded expression if folding is successful. Otherwise,
8684 return NULL_TREE. */
8686 static tree
8687 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8689 enum tree_code compl_code;
8691 if (code == MIN_EXPR)
8692 compl_code = MAX_EXPR;
8693 else if (code == MAX_EXPR)
8694 compl_code = MIN_EXPR;
8695 else
8696 gcc_unreachable ();
8698 /* MIN (MAX (a, b), b) == b. */
8699 if (TREE_CODE (op0) == compl_code
8700 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8701 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8703 /* MIN (MAX (b, a), b) == b. */
8704 if (TREE_CODE (op0) == compl_code
8705 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8706 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8707 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8709 /* MIN (a, MAX (a, b)) == a. */
8710 if (TREE_CODE (op1) == compl_code
8711 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8712 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8713 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8715 /* MIN (a, MAX (b, a)) == a. */
8716 if (TREE_CODE (op1) == compl_code
8717 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8718 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8719 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8721 return NULL_TREE;
8724 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8725 by changing CODE to reduce the magnitude of constants involved in
8726 ARG0 of the comparison.
8727 Returns a canonicalized comparison tree if a simplification was
8728 possible, otherwise returns NULL_TREE.
8729 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8730 valid if signed overflow is undefined. */
8732 static tree
8733 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8734 tree arg0, tree arg1,
8735 bool *strict_overflow_p)
8737 enum tree_code code0 = TREE_CODE (arg0);
8738 tree t, cst0 = NULL_TREE;
8739 int sgn0;
8740 bool swap = false;
8742 /* Match A +- CST code arg1 and CST code arg1. We can change the
8743 first form only if the operation does not wrap. */
8744 if (!((/* In principle pointer arithmetic also can be non-wrapping,
8745 but that causes problems elsewhere. */
8746 (code0 == MINUSNV_EXPR
8747 || code0 == PLUSNV_EXPR)
8748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8749 || code0 == INTEGER_CST))
8750 return NULL_TREE;
8752 /* Identify the constant in arg0 and its sign. */
8753 if (code0 == INTEGER_CST)
8754 cst0 = arg0;
8755 else
8756 cst0 = TREE_OPERAND (arg0, 1);
8757 sgn0 = tree_int_cst_sgn (cst0);
8759 /* Overflowed constants and zero will cause problems. */
8760 if (integer_zerop (cst0)
8761 || TREE_OVERFLOW (cst0))
8762 return NULL_TREE;
8764 /* See if we can reduce the magnitude of the constant in
8765 arg0 by changing the comparison code. */
8766 if (code0 == INTEGER_CST)
8768 /* CST <= arg1 -> CST-1 < arg1. */
8769 if (code == LE_EXPR && sgn0 == 1)
8770 code = LT_EXPR;
8771 /* -CST < arg1 -> -CST-1 <= arg1. */
8772 else if (code == LT_EXPR && sgn0 == -1)
8773 code = LE_EXPR;
8774 /* CST > arg1 -> CST-1 >= arg1. */
8775 else if (code == GT_EXPR && sgn0 == 1)
8776 code = GE_EXPR;
8777 /* -CST >= arg1 -> -CST-1 > arg1. */
8778 else if (code == GE_EXPR && sgn0 == -1)
8779 code = GT_EXPR;
8780 else
8781 return NULL_TREE;
8782 /* arg1 code' CST' might be more canonical. */
8783 swap = true;
8785 else
8787 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8788 if (code == LT_EXPR
8789 && code0 == ((sgn0 == -1) ? PLUSNV_EXPR : MINUSNV_EXPR))
8790 code = LE_EXPR;
8791 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8792 else if (code == GT_EXPR
8793 && code0 == ((sgn0 == -1) ? MINUSNV_EXPR : PLUSNV_EXPR))
8794 code = GE_EXPR;
8795 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8796 else if (code == LE_EXPR
8797 && code0 == ((sgn0 == -1) ? MINUSNV_EXPR : PLUSNV_EXPR))
8798 code = LT_EXPR;
8799 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8800 else if (code == GE_EXPR
8801 && code0 == ((sgn0 == -1) ? PLUSNV_EXPR : MINUSNV_EXPR))
8802 code = GT_EXPR;
8803 else
8804 return NULL_TREE;
8805 if (!TREE_NO_WARNING (arg0))
8806 *strict_overflow_p = true;
8809 /* Now build the constant reduced in magnitude. But not if that
8810 would produce one outside of its types range. */
8811 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8812 && ((sgn0 == 1
8813 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8814 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8815 || (sgn0 == -1
8816 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8817 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8818 /* We cannot swap the comparison here as that would cause us to
8819 endlessly recurse. */
8820 return NULL_TREE;
8822 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8823 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8824 /* If A - CST didn't overflow so does A - (CST - 1). So it is safe
8825 to keep the *NV_EXPR variants. */
8826 if (code0 != INTEGER_CST)
8827 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8829 /* If swapping might yield to a more canonical form, do so. */
8830 if (swap)
8831 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8832 else
8833 return fold_build2 (code, type, t, arg1);
8836 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8837 overflow further. Try to decrease the magnitude of constants involved
8838 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8839 and put sole constants at the second argument position.
8840 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8842 static tree
8843 maybe_canonicalize_comparison (enum tree_code code, tree type,
8844 tree arg0, tree arg1)
8846 tree t;
8847 bool strict_overflow_p;
8848 const char * const warnmsg = G_("assuming signed overflow does not occur "
8849 "when reducing constant in comparison");
8851 /* Try canonicalization by simplifying arg0. */
8852 strict_overflow_p = false;
8853 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8854 &strict_overflow_p);
8855 if (t)
8857 if (strict_overflow_p)
8858 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8859 return t;
8862 /* Try canonicalization by simplifying arg1 using the swapped
8863 comparison. */
8864 code = swap_tree_comparison (code);
8865 strict_overflow_p = false;
8866 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8867 &strict_overflow_p);
8868 if (t && strict_overflow_p)
8869 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8870 return t;
8873 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8874 space. This is used to avoid issuing overflow warnings for
8875 expressions like &p->x which can not wrap. */
8877 static bool
8878 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8880 unsigned HOST_WIDE_INT offset_low, total_low;
8881 HOST_WIDE_INT size, offset_high, total_high;
8883 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8884 return true;
8886 if (bitpos < 0)
8887 return true;
8889 if (offset == NULL_TREE)
8891 offset_low = 0;
8892 offset_high = 0;
8894 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8895 return true;
8896 else
8898 offset_low = TREE_INT_CST_LOW (offset);
8899 offset_high = TREE_INT_CST_HIGH (offset);
8902 if (add_double_with_sign (offset_low, offset_high,
8903 bitpos / BITS_PER_UNIT, 0,
8904 &total_low, &total_high,
8905 true))
8906 return true;
8908 if (total_high != 0)
8909 return true;
8911 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8912 if (size <= 0)
8913 return true;
8915 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8916 array. */
8917 if (TREE_CODE (base) == ADDR_EXPR)
8919 HOST_WIDE_INT base_size;
8921 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8922 if (base_size > 0 && size < base_size)
8923 size = base_size;
8926 return total_low > (unsigned HOST_WIDE_INT) size;
8929 /* Subroutine of fold_binary. This routine performs all of the
8930 transformations that are common to the equality/inequality
8931 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8932 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8933 fold_binary should call fold_binary. Fold a comparison with
8934 tree code CODE and type TYPE with operands OP0 and OP1. Return
8935 the folded comparison or NULL_TREE. */
8937 static tree
8938 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8940 tree arg0, arg1, tem;
8942 arg0 = op0;
8943 arg1 = op1;
8945 STRIP_SIGN_NOPS (arg0);
8946 STRIP_SIGN_NOPS (arg1);
8948 tem = fold_relational_const (code, type, arg0, arg1);
8949 if (tem != NULL_TREE)
8950 return tem;
8952 /* If one arg is a real or integer constant, put it last. */
8953 if (tree_swap_operands_p (arg0, arg1, true))
8954 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8956 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1
8957 if the original addition does not overflow. */
8958 if ((TREE_CODE (arg0) == PLUSNV_EXPR || TREE_CODE (arg0) == MINUSNV_EXPR)
8959 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8960 && TREE_CODE (arg1) == INTEGER_CST)
8962 tree const1 = TREE_OPERAND (arg0, 1);
8963 tree const2 = arg1;
8964 tree variable = TREE_OPERAND (arg0, 0);
8965 tree lhs;
8966 int lhs_add;
8967 unsigned HOST_WIDE_INT low;
8968 HOST_WIDE_INT hi;
8969 int overflow;
8971 lhs_add = TREE_CODE (arg0) != PLUSNV_EXPR;
8973 overflow = int_const_binop_1 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8974 const2, const1, &low, &hi);
8975 overflow |= fit_double_type (low, hi, &low, &hi, TREE_TYPE (arg1));
8977 /* If there was overflow on combining the two constants we have to
8978 flip the comparison code. */
8979 if (overflow)
8980 code = swap_tree_comparison (code);
8982 lhs = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
8983 if (!TREE_NO_WARNING (arg0))
8984 fold_overflow_warning (("assuming signed overflow does not occur "
8985 "when changing X +- C1 cmp C2 to "
8986 "X cmp C1 +- C2"),
8987 WARN_STRICT_OVERFLOW_COMPARISON);
8988 return fold_build2 (code, type, variable, lhs);
8991 /* For comparisons of pointers we can decompose it to a compile time
8992 comparison of the base objects and the offsets into the object.
8993 This requires at least one operand being an ADDR_EXPR or a
8994 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8995 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8996 && (TREE_CODE (arg0) == ADDR_EXPR
8997 || TREE_CODE (arg1) == ADDR_EXPR
8998 || POINTER_PLUS_EXPR_P (arg0)
8999 || POINTER_PLUS_EXPR_P (arg1)))
9001 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9002 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9003 enum machine_mode mode;
9004 int volatilep, unsignedp;
9005 bool indirect_base0 = false, indirect_base1 = false;
9006 bool no_overflow = true;
9008 /* Get base and offset for the access. Strip ADDR_EXPR for
9009 get_inner_reference, but put it back by stripping INDIRECT_REF
9010 off the base object if possible. indirect_baseN will be true
9011 if baseN is not an address but refers to the object itself. */
9012 base0 = arg0;
9013 if (TREE_CODE (arg0) == ADDR_EXPR)
9015 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9016 &bitsize, &bitpos0, &offset0, &mode,
9017 &unsignedp, &volatilep, false);
9018 if (TREE_CODE (base0) == INDIRECT_REF)
9019 base0 = TREE_OPERAND (base0, 0);
9020 else
9021 indirect_base0 = true;
9023 else if (POINTER_PLUS_EXPR_P (arg0))
9025 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9026 no_overflow = false;
9027 base0 = TREE_OPERAND (arg0, 0);
9028 offset0 = TREE_OPERAND (arg0, 1);
9031 base1 = arg1;
9032 if (TREE_CODE (arg1) == ADDR_EXPR)
9034 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9035 &bitsize, &bitpos1, &offset1, &mode,
9036 &unsignedp, &volatilep, false);
9037 if (TREE_CODE (base1) == INDIRECT_REF)
9038 base1 = TREE_OPERAND (base1, 0);
9039 else
9040 indirect_base1 = true;
9042 else if (POINTER_PLUS_EXPR_P (arg1))
9044 if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9045 no_overflow = false;
9046 base1 = TREE_OPERAND (arg1, 0);
9047 offset1 = TREE_OPERAND (arg1, 1);
9050 /* If we have equivalent bases we might be able to simplify. */
9051 if (indirect_base0 == indirect_base1
9052 && operand_equal_p (base0, base1, 0))
9054 /* We can fold this expression to a constant if the non-constant
9055 offset parts are equal. */
9056 if ((offset0 == offset1
9057 || (offset0 && offset1
9058 && operand_equal_p (offset0, offset1, 0)))
9059 && (code == EQ_EXPR
9060 || code == NE_EXPR
9061 || no_overflow))
9063 if (code != EQ_EXPR
9064 && code != NE_EXPR
9065 && bitpos0 != bitpos1
9066 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9067 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9068 fold_overflow_warning (("assuming pointer wraparound does not "
9069 "occur when comparing P +- C1 with "
9070 "P +- C2"),
9071 WARN_STRICT_OVERFLOW_CONDITIONAL);
9073 switch (code)
9075 case EQ_EXPR:
9076 return constant_boolean_node (bitpos0 == bitpos1, type);
9077 case NE_EXPR:
9078 return constant_boolean_node (bitpos0 != bitpos1, type);
9079 case LT_EXPR:
9080 return constant_boolean_node (bitpos0 < bitpos1, type);
9081 case LE_EXPR:
9082 return constant_boolean_node (bitpos0 <= bitpos1, type);
9083 case GE_EXPR:
9084 return constant_boolean_node (bitpos0 >= bitpos1, type);
9085 case GT_EXPR:
9086 return constant_boolean_node (bitpos0 > bitpos1, type);
9087 default:;
9090 /* We can simplify the comparison to a comparison of the variable
9091 offset parts if the constant offset parts are equal.
9092 Be careful to use signed size type here because otherwise we
9093 mess with array offsets in the wrong way. This is possible
9094 because pointer arithmetic is restricted to retain within an
9095 object and overflow on pointer differences is undefined as of
9096 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9097 else if (bitpos0 == bitpos1
9098 && ((code == EQ_EXPR || code == NE_EXPR)
9099 || no_overflow))
9101 tree signed_size_type_node;
9102 signed_size_type_node = signed_type_for (size_type_node);
9104 /* By converting to signed size type we cover middle-end pointer
9105 arithmetic which operates on unsigned pointer types of size
9106 type size and ARRAY_REF offsets which are properly sign or
9107 zero extended from their type in case it is narrower than
9108 size type. */
9109 if (offset0 == NULL_TREE)
9110 offset0 = build_int_cst (signed_size_type_node, 0);
9111 else
9112 offset0 = fold_convert (signed_size_type_node, offset0);
9113 if (offset1 == NULL_TREE)
9114 offset1 = build_int_cst (signed_size_type_node, 0);
9115 else
9116 offset1 = fold_convert (signed_size_type_node, offset1);
9118 if (code != EQ_EXPR
9119 && code != NE_EXPR
9120 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9121 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9122 fold_overflow_warning (("assuming pointer wraparound does not "
9123 "occur when comparing P +- C1 with "
9124 "P +- C2"),
9125 WARN_STRICT_OVERFLOW_COMPARISON);
9127 return fold_build2 (code, type, offset0, offset1);
9130 /* For non-equal bases we can simplify if they are addresses
9131 of local binding decls or constants. */
9132 else if (indirect_base0 && indirect_base1
9133 /* We know that !operand_equal_p (base0, base1, 0)
9134 because the if condition was false. But make
9135 sure two decls are not the same. */
9136 && base0 != base1
9137 && TREE_CODE (arg0) == ADDR_EXPR
9138 && TREE_CODE (arg1) == ADDR_EXPR
9139 && (((TREE_CODE (base0) == VAR_DECL
9140 || TREE_CODE (base0) == PARM_DECL)
9141 && (targetm.binds_local_p (base0)
9142 || CONSTANT_CLASS_P (base1)))
9143 || CONSTANT_CLASS_P (base0))
9144 && (((TREE_CODE (base1) == VAR_DECL
9145 || TREE_CODE (base1) == PARM_DECL)
9146 && (targetm.binds_local_p (base1)
9147 || CONSTANT_CLASS_P (base0)))
9148 || CONSTANT_CLASS_P (base1)))
9150 if (code == EQ_EXPR)
9151 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9152 else if (code == NE_EXPR)
9153 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9155 /* For equal offsets we can simplify to a comparison of the
9156 base addresses. */
9157 else if (bitpos0 == bitpos1
9158 && (indirect_base0
9159 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9160 && (indirect_base1
9161 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9162 && ((offset0 == offset1)
9163 || (offset0 && offset1
9164 && operand_equal_p (offset0, offset1, 0))))
9166 if (indirect_base0)
9167 base0 = build_fold_addr_expr (base0);
9168 if (indirect_base1)
9169 base1 = build_fold_addr_expr (base1);
9170 return fold_build2 (code, type, base0, base1);
9174 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9175 X CMP Y +- C2 +- C1 if X +- C1 does not overflow.. This is then
9176 valid if the resulting offset is smaller in absolute value than the
9177 original one. */
9178 if ((((TREE_CODE (arg0) == PLUSNV_EXPR || TREE_CODE (arg0) == MINUSNV_EXPR)
9179 && (PLUS_EXPR_P (arg1) || MINUS_EXPR_P (arg1)))
9180 || ((TREE_CODE (arg1) == PLUSNV_EXPR || TREE_CODE (arg1) == MINUSNV_EXPR)
9181 && (PLUS_EXPR_P (arg0) || MINUS_EXPR_P (arg0))))
9182 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9183 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST)
9185 tree const1 = TREE_OPERAND (arg0, 1);
9186 tree const2 = TREE_OPERAND (arg1, 1);
9187 tree variable1 = TREE_OPERAND (arg0, 0);
9188 tree variable2 = TREE_OPERAND (arg1, 0);
9189 tree cst;
9190 unsigned HOST_WIDE_INT low;
9191 HOST_WIDE_INT hi;
9192 int overflow;
9193 const char * const warnmsg = G_("assuming signed overflow does not "
9194 "occur when combining constants around "
9195 "a comparison");
9197 /* Put the constant on the side where it doesn't overflow and is
9198 of lower absolute value than before if the operation on
9199 the other side doesn't overflow. */
9200 if (TREE_CODE (arg0) == PLUSNV_EXPR || TREE_CODE (arg0) == MINUSNV_EXPR)
9202 overflow = int_const_binop_1 ((strip_nv (TREE_CODE (arg0))
9203 == strip_nv (TREE_CODE (arg1)))
9204 ? MINUS_EXPR : PLUS_EXPR,
9205 const2, const1, &low, &hi);
9206 overflow |= fit_double_type (low, hi, &low, &hi, TREE_TYPE (arg1));
9207 cst = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
9208 if (!overflow
9209 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9211 if (!TREE_NO_WARNING (arg0))
9212 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9213 return fold_build2 (code, type,
9214 variable1,
9215 fold_build2 (strip_nv (TREE_CODE (arg1)),
9216 TREE_TYPE (arg1),
9217 variable2, cst));
9221 if (TREE_CODE (arg1) == PLUSNV_EXPR || TREE_CODE (arg1) == MINUSNV_EXPR)
9223 overflow = int_const_binop_1 ((strip_nv (TREE_CODE (arg0))
9224 == strip_nv (TREE_CODE (arg1)))
9225 ? MINUS_EXPR : PLUS_EXPR,
9226 const1, const2, &low, &hi);
9227 overflow |= fit_double_type (low, hi, &low, &hi, TREE_TYPE (arg0));
9228 cst = build_int_cst_wide (TREE_TYPE (arg0), low, hi);
9229 if (!overflow
9230 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9232 if (!TREE_NO_WARNING (arg1))
9233 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9234 return fold_build2 (code, type,
9235 fold_build2 (strip_nv (TREE_CODE (arg0)),
9236 TREE_TYPE (arg0),
9237 variable1, cst),
9238 variable2);
9243 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9244 signed arithmetic case. That form is created by the compiler
9245 often enough for folding it to be of value. One example is in
9246 computing loop trip counts after Operator Strength Reduction. */
9247 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9248 && TREE_CODE (arg0) == MULT_EXPR
9249 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9250 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9251 && integer_zerop (arg1))
9253 tree const1 = TREE_OPERAND (arg0, 1);
9254 tree const2 = arg1; /* zero */
9255 tree variable1 = TREE_OPERAND (arg0, 0);
9256 enum tree_code cmp_code = code;
9258 gcc_assert (!integer_zerop (const1));
9260 fold_overflow_warning (("assuming signed overflow does not occur when "
9261 "eliminating multiplication in comparison "
9262 "with zero"),
9263 WARN_STRICT_OVERFLOW_COMPARISON);
9265 /* If const1 is negative we swap the sense of the comparison. */
9266 if (tree_int_cst_sgn (const1) < 0)
9267 cmp_code = swap_tree_comparison (cmp_code);
9269 return fold_build2 (cmp_code, type, variable1, const2);
9272 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9273 if (tem)
9274 return tem;
9276 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9278 tree targ0 = strip_float_extensions (arg0);
9279 tree targ1 = strip_float_extensions (arg1);
9280 tree newtype = TREE_TYPE (targ0);
9282 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9283 newtype = TREE_TYPE (targ1);
9285 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9286 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9287 return fold_build2 (code, type, fold_convert (newtype, targ0),
9288 fold_convert (newtype, targ1));
9290 /* (-a) CMP (-b) -> b CMP a */
9291 if (TREE_CODE (arg0) == NEGATE_EXPR
9292 && TREE_CODE (arg1) == NEGATE_EXPR)
9293 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9294 TREE_OPERAND (arg0, 0));
9296 if (TREE_CODE (arg1) == REAL_CST)
9298 REAL_VALUE_TYPE cst;
9299 cst = TREE_REAL_CST (arg1);
9301 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9302 if (TREE_CODE (arg0) == NEGATE_EXPR)
9303 return fold_build2 (swap_tree_comparison (code), type,
9304 TREE_OPERAND (arg0, 0),
9305 build_real (TREE_TYPE (arg1),
9306 REAL_VALUE_NEGATE (cst)));
9308 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9309 /* a CMP (-0) -> a CMP 0 */
9310 if (REAL_VALUE_MINUS_ZERO (cst))
9311 return fold_build2 (code, type, arg0,
9312 build_real (TREE_TYPE (arg1), dconst0));
9314 /* x != NaN is always true, other ops are always false. */
9315 if (REAL_VALUE_ISNAN (cst)
9316 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9318 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9319 return omit_one_operand (type, tem, arg0);
9322 /* Fold comparisons against infinity. */
9323 if (REAL_VALUE_ISINF (cst)
9324 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9326 tem = fold_inf_compare (code, type, arg0, arg1);
9327 if (tem != NULL_TREE)
9328 return tem;
9332 /* If this is a comparison of a real constant with a PLUS_EXPR
9333 or a MINUS_EXPR of a real constant, we can convert it into a
9334 comparison with a revised real constant as long as no overflow
9335 occurs when unsafe_math_optimizations are enabled. */
9336 if (flag_unsafe_math_optimizations
9337 && TREE_CODE (arg1) == REAL_CST
9338 && (TREE_CODE (arg0) == PLUS_EXPR
9339 || TREE_CODE (arg0) == MINUS_EXPR)
9340 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9341 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9342 ? MINUS_EXPR : PLUS_EXPR,
9343 arg1, TREE_OPERAND (arg0, 1), 0))
9344 && !TREE_OVERFLOW (tem))
9345 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9347 /* Likewise, we can simplify a comparison of a real constant with
9348 a MINUS_EXPR whose first operand is also a real constant, i.e.
9349 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9350 floating-point types only if -fassociative-math is set. */
9351 if (flag_associative_math
9352 && TREE_CODE (arg1) == REAL_CST
9353 && TREE_CODE (arg0) == MINUS_EXPR
9354 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9355 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9356 arg1, 0))
9357 && !TREE_OVERFLOW (tem))
9358 return fold_build2 (swap_tree_comparison (code), type,
9359 TREE_OPERAND (arg0, 1), tem);
9361 /* Fold comparisons against built-in math functions. */
9362 if (TREE_CODE (arg1) == REAL_CST
9363 && flag_unsafe_math_optimizations
9364 && ! flag_errno_math)
9366 enum built_in_function fcode = builtin_mathfn_code (arg0);
9368 if (fcode != END_BUILTINS)
9370 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9371 if (tem != NULL_TREE)
9372 return tem;
9377 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9378 && CONVERT_EXPR_P (arg0))
9380 /* If we are widening one operand of an integer comparison,
9381 see if the other operand is similarly being widened. Perhaps we
9382 can do the comparison in the narrower type. */
9383 tem = fold_widened_comparison (code, type, arg0, arg1);
9384 if (tem)
9385 return tem;
9387 /* Or if we are changing signedness. */
9388 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9389 if (tem)
9390 return tem;
9393 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9394 constant, we can simplify it. */
9395 if (TREE_CODE (arg1) == INTEGER_CST
9396 && (TREE_CODE (arg0) == MIN_EXPR
9397 || TREE_CODE (arg0) == MAX_EXPR)
9398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9400 tem = optimize_minmax_comparison (code, type, op0, op1);
9401 if (tem)
9402 return tem;
9405 /* Simplify comparison of something with itself. (For IEEE
9406 floating-point, we can only do some of these simplifications.) */
9407 if (operand_equal_p (arg0, arg1, 0))
9409 switch (code)
9411 case EQ_EXPR:
9412 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9413 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9414 return constant_boolean_node (1, type);
9415 break;
9417 case GE_EXPR:
9418 case LE_EXPR:
9419 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9420 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9421 return constant_boolean_node (1, type);
9422 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9424 case NE_EXPR:
9425 /* For NE, we can only do this simplification if integer
9426 or we don't honor IEEE floating point NaNs. */
9427 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9428 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9429 break;
9430 /* ... fall through ... */
9431 case GT_EXPR:
9432 case LT_EXPR:
9433 return constant_boolean_node (0, type);
9434 default:
9435 gcc_unreachable ();
9439 /* If we are comparing an expression that just has comparisons
9440 of two integer values, arithmetic expressions of those comparisons,
9441 and constants, we can simplify it. There are only three cases
9442 to check: the two values can either be equal, the first can be
9443 greater, or the second can be greater. Fold the expression for
9444 those three values. Since each value must be 0 or 1, we have
9445 eight possibilities, each of which corresponds to the constant 0
9446 or 1 or one of the six possible comparisons.
9448 This handles common cases like (a > b) == 0 but also handles
9449 expressions like ((x > y) - (y > x)) > 0, which supposedly
9450 occur in macroized code. */
9452 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9454 tree cval1 = 0, cval2 = 0;
9455 int save_p = 0;
9457 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9458 /* Don't handle degenerate cases here; they should already
9459 have been handled anyway. */
9460 && cval1 != 0 && cval2 != 0
9461 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9462 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9463 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9464 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9465 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9466 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9467 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9469 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9470 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9472 /* We can't just pass T to eval_subst in case cval1 or cval2
9473 was the same as ARG1. */
9475 tree high_result
9476 = fold_build2 (code, type,
9477 eval_subst (arg0, cval1, maxval,
9478 cval2, minval),
9479 arg1);
9480 tree equal_result
9481 = fold_build2 (code, type,
9482 eval_subst (arg0, cval1, maxval,
9483 cval2, maxval),
9484 arg1);
9485 tree low_result
9486 = fold_build2 (code, type,
9487 eval_subst (arg0, cval1, minval,
9488 cval2, maxval),
9489 arg1);
9491 /* All three of these results should be 0 or 1. Confirm they are.
9492 Then use those values to select the proper code to use. */
9494 if (TREE_CODE (high_result) == INTEGER_CST
9495 && TREE_CODE (equal_result) == INTEGER_CST
9496 && TREE_CODE (low_result) == INTEGER_CST)
9498 /* Make a 3-bit mask with the high-order bit being the
9499 value for `>', the next for '=', and the low for '<'. */
9500 switch ((integer_onep (high_result) * 4)
9501 + (integer_onep (equal_result) * 2)
9502 + integer_onep (low_result))
9504 case 0:
9505 /* Always false. */
9506 return omit_one_operand (type, integer_zero_node, arg0);
9507 case 1:
9508 code = LT_EXPR;
9509 break;
9510 case 2:
9511 code = EQ_EXPR;
9512 break;
9513 case 3:
9514 code = LE_EXPR;
9515 break;
9516 case 4:
9517 code = GT_EXPR;
9518 break;
9519 case 5:
9520 code = NE_EXPR;
9521 break;
9522 case 6:
9523 code = GE_EXPR;
9524 break;
9525 case 7:
9526 /* Always true. */
9527 return omit_one_operand (type, integer_one_node, arg0);
9530 if (save_p)
9531 return save_expr (build2 (code, type, cval1, cval2));
9532 return fold_build2 (code, type, cval1, cval2);
9537 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9538 into a single range test. */
9539 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9540 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9541 && TREE_CODE (arg1) == INTEGER_CST
9542 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9543 && !integer_zerop (TREE_OPERAND (arg0, 1))
9544 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9545 && !TREE_OVERFLOW (arg1))
9547 tem = fold_div_compare (code, type, arg0, arg1);
9548 if (tem != NULL_TREE)
9549 return tem;
9552 /* Fold ~X op ~Y as Y op X. */
9553 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9554 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9556 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9557 return fold_build2 (code, type,
9558 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9559 TREE_OPERAND (arg0, 0));
9562 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9563 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9564 && TREE_CODE (arg1) == INTEGER_CST)
9566 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9567 return fold_build2 (swap_tree_comparison (code), type,
9568 TREE_OPERAND (arg0, 0),
9569 fold_build1 (BIT_NOT_EXPR, cmp_type,
9570 fold_convert (cmp_type, arg1)));
9573 return NULL_TREE;
9577 /* Subroutine of fold_binary. Optimize complex multiplications of the
9578 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9579 argument EXPR represents the expression "z" of type TYPE. */
9581 static tree
9582 fold_mult_zconjz (tree type, tree expr)
9584 tree itype = TREE_TYPE (type);
9585 tree rpart, ipart, tem;
9587 if (TREE_CODE (expr) == COMPLEX_EXPR)
9589 rpart = TREE_OPERAND (expr, 0);
9590 ipart = TREE_OPERAND (expr, 1);
9592 else if (TREE_CODE (expr) == COMPLEX_CST)
9594 rpart = TREE_REALPART (expr);
9595 ipart = TREE_IMAGPART (expr);
9597 else
9599 expr = save_expr (expr);
9600 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9601 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9604 rpart = save_expr (rpart);
9605 ipart = save_expr (ipart);
9606 tem = fold_build2 (PLUS_EXPR, itype,
9607 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9608 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9609 return fold_build2 (COMPLEX_EXPR, type, tem,
9610 fold_convert (itype, integer_zero_node));
9614 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9615 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9616 guarantees that P and N have the same least significant log2(M) bits.
9617 N is not otherwise constrained. In particular, N is not normalized to
9618 0 <= N < M as is common. In general, the precise value of P is unknown.
9619 M is chosen as large as possible such that constant N can be determined.
9621 Returns M and sets *RESIDUE to N.
9623 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9624 account. This is not always possible due to PR 35705.
9627 static unsigned HOST_WIDE_INT
9628 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9629 bool allow_func_align)
9631 enum tree_code code;
9633 *residue = 0;
9635 code = TREE_CODE (expr);
9636 if (code == ADDR_EXPR)
9638 expr = TREE_OPERAND (expr, 0);
9639 if (handled_component_p (expr))
9641 HOST_WIDE_INT bitsize, bitpos;
9642 tree offset;
9643 enum machine_mode mode;
9644 int unsignedp, volatilep;
9646 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9647 &mode, &unsignedp, &volatilep, false);
9648 *residue = bitpos / BITS_PER_UNIT;
9649 if (offset)
9651 if (TREE_CODE (offset) == INTEGER_CST)
9652 *residue += TREE_INT_CST_LOW (offset);
9653 else
9654 /* We don't handle more complicated offset expressions. */
9655 return 1;
9659 if (DECL_P (expr)
9660 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9661 return DECL_ALIGN_UNIT (expr);
9663 else if (code == POINTER_PLUS_EXPR)
9665 tree op0, op1;
9666 unsigned HOST_WIDE_INT modulus;
9667 enum tree_code inner_code;
9669 op0 = TREE_OPERAND (expr, 0);
9670 STRIP_NOPS (op0);
9671 modulus = get_pointer_modulus_and_residue (op0, residue,
9672 allow_func_align);
9674 op1 = TREE_OPERAND (expr, 1);
9675 STRIP_NOPS (op1);
9676 inner_code = TREE_CODE (op1);
9677 if (inner_code == INTEGER_CST)
9679 *residue += TREE_INT_CST_LOW (op1);
9680 return modulus;
9682 else if (inner_code == MULT_EXPR)
9684 op1 = TREE_OPERAND (op1, 1);
9685 if (TREE_CODE (op1) == INTEGER_CST)
9687 unsigned HOST_WIDE_INT align;
9689 /* Compute the greatest power-of-2 divisor of op1. */
9690 align = TREE_INT_CST_LOW (op1);
9691 align &= -align;
9693 /* If align is non-zero and less than *modulus, replace
9694 *modulus with align., If align is 0, then either op1 is 0
9695 or the greatest power-of-2 divisor of op1 doesn't fit in an
9696 unsigned HOST_WIDE_INT. In either case, no additional
9697 constraint is imposed. */
9698 if (align)
9699 modulus = MIN (modulus, align);
9701 return modulus;
9706 /* If we get here, we were unable to determine anything useful about the
9707 expression. */
9708 return 1;
9712 /* Fold a binary expression of code CODE and type TYPE with operands
9713 OP0 and OP1. Return the folded expression if folding is
9714 successful. Otherwise, return NULL_TREE. */
9716 tree
9717 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9719 enum tree_code_class kind = TREE_CODE_CLASS (code);
9720 tree arg0, arg1, tem;
9721 tree t1 = NULL_TREE;
9722 bool strict_overflow_p;
9724 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9725 && TREE_CODE_LENGTH (code) == 2
9726 && op0 != NULL_TREE
9727 && op1 != NULL_TREE);
9729 arg0 = op0;
9730 arg1 = op1;
9732 /* Strip any conversions that don't change the mode. This is
9733 safe for every expression, except for a comparison expression
9734 because its signedness is derived from its operands. So, in
9735 the latter case, only strip conversions that don't change the
9736 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9737 preserved.
9739 Note that this is done as an internal manipulation within the
9740 constant folder, in order to find the simplest representation
9741 of the arguments so that their form can be studied. In any
9742 cases, the appropriate type conversions should be put back in
9743 the tree that will get out of the constant folder. */
9745 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9747 STRIP_SIGN_NOPS (arg0);
9748 STRIP_SIGN_NOPS (arg1);
9750 else
9752 STRIP_NOPS (arg0);
9753 STRIP_NOPS (arg1);
9756 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9757 constant but we can't do arithmetic on them. */
9758 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9759 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9760 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9761 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9762 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9763 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9765 if (kind == tcc_binary)
9767 /* Make sure type and arg0 have the same saturating flag. */
9768 gcc_assert (TYPE_SATURATING (type)
9769 == TYPE_SATURATING (TREE_TYPE (arg0)));
9770 tem = const_binop (code, arg0, arg1, 0);
9772 else if (kind == tcc_comparison)
9773 tem = fold_relational_const (code, type, arg0, arg1);
9774 else
9775 tem = NULL_TREE;
9777 if (tem != NULL_TREE)
9779 if (TREE_TYPE (tem) != type)
9780 tem = fold_convert (type, tem);
9781 return tem;
9785 /* If this is a commutative operation, and ARG0 is a constant, move it
9786 to ARG1 to reduce the number of tests below. */
9787 if (commutative_tree_code (code)
9788 && tree_swap_operands_p (arg0, arg1, true))
9789 return fold_build2 (code, type, op1, op0);
9791 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9793 First check for cases where an arithmetic operation is applied to a
9794 compound, conditional, or comparison operation. Push the arithmetic
9795 operation inside the compound or conditional to see if any folding
9796 can then be done. Convert comparison to conditional for this purpose.
9797 The also optimizes non-constant cases that used to be done in
9798 expand_expr.
9800 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9801 one of the operands is a comparison and the other is a comparison, a
9802 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9803 code below would make the expression more complex. Change it to a
9804 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9805 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9807 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9808 || code == EQ_EXPR || code == NE_EXPR)
9809 && ((truth_value_p (TREE_CODE (arg0))
9810 && (truth_value_p (TREE_CODE (arg1))
9811 || (TREE_CODE (arg1) == BIT_AND_EXPR
9812 && integer_onep (TREE_OPERAND (arg1, 1)))))
9813 || (truth_value_p (TREE_CODE (arg1))
9814 && (truth_value_p (TREE_CODE (arg0))
9815 || (TREE_CODE (arg0) == BIT_AND_EXPR
9816 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9818 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9819 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9820 : TRUTH_XOR_EXPR,
9821 boolean_type_node,
9822 fold_convert (boolean_type_node, arg0),
9823 fold_convert (boolean_type_node, arg1));
9825 if (code == EQ_EXPR)
9826 tem = invert_truthvalue (tem);
9828 return fold_convert (type, tem);
9831 if (TREE_CODE_CLASS (code) == tcc_binary
9832 || TREE_CODE_CLASS (code) == tcc_comparison)
9834 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9835 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9836 fold_build2 (code, type,
9837 fold_convert (TREE_TYPE (op0),
9838 TREE_OPERAND (arg0, 1)),
9839 op1));
9840 if (TREE_CODE (arg1) == COMPOUND_EXPR
9841 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9842 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9843 fold_build2 (code, type, op0,
9844 fold_convert (TREE_TYPE (op1),
9845 TREE_OPERAND (arg1, 1))));
9847 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9849 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9850 arg0, arg1,
9851 /*cond_first_p=*/1);
9852 if (tem != NULL_TREE)
9853 return tem;
9856 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9858 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9859 arg1, arg0,
9860 /*cond_first_p=*/0);
9861 if (tem != NULL_TREE)
9862 return tem;
9866 switch (code)
9868 case POINTER_PLUS_EXPR:
9869 case POINTER_PLUSNV_EXPR:
9870 /* 0 +p index -> (type)index */
9871 if (integer_zerop (arg0))
9872 return non_lvalue (fold_convert (type, arg1));
9874 /* PTR +p 0 -> PTR */
9875 if (integer_zerop (arg1))
9876 return non_lvalue (fold_convert (type, arg0));
9878 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9879 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9880 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9881 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9882 fold_convert (sizetype, arg1),
9883 fold_convert (sizetype, arg0)));
9885 /* index +p PTR -> PTR +p index */
9886 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9887 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9888 return fold_build2 (code, type,
9889 fold_convert (type, arg1),
9890 fold_convert (sizetype, arg0));
9892 /* (PTR +p B) +p A -> PTR +p (B + A) */
9893 if (POINTER_PLUS_EXPR_P (arg0))
9895 tree inner;
9896 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9897 tree arg00 = TREE_OPERAND (arg0, 0);
9898 enum tree_code ncode = POINTER_PLUS_EXPR;
9899 if (code == POINTER_PLUSNV_EXPR
9900 && TREE_CODE (arg0) == POINTER_PLUSNV_EXPR)
9901 ncode = POINTER_PLUSNV_EXPR;
9902 inner = fold_build2 (PLUS_EXPR, sizetype,
9903 arg01, fold_convert (sizetype, arg1));
9904 return fold_convert (type,
9905 fold_build2 (ncode,
9906 TREE_TYPE (arg00), arg00, inner));
9909 /* PTR_CST +p CST -> CST1 */
9910 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9911 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9913 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9914 of the array. Loop optimizer sometimes produce this type of
9915 expressions. */
9916 if (TREE_CODE (arg0) == ADDR_EXPR)
9918 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9919 if (tem)
9920 return fold_convert (type, tem);
9923 return NULL_TREE;
9925 case PLUS_EXPR:
9926 case PLUSNV_EXPR:
9927 /* A + (-B) -> A - B */
9928 if (NEGATE_EXPR_P (arg1))
9929 return fold_build2 (MINUS_EXPR, type,
9930 fold_convert (type, arg0),
9931 fold_convert (type, TREE_OPERAND (arg1, 0)));
9932 /* (-A) + B -> B - A */
9933 if (NEGATE_EXPR_P (arg0)
9934 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9935 return fold_build2 (MINUS_EXPR, type,
9936 fold_convert (type, arg1),
9937 fold_convert (type, TREE_OPERAND (arg0, 0)));
9939 if (INTEGRAL_TYPE_P (type))
9941 /* Convert ~A + 1 to -A. */
9942 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9943 && integer_onep (arg1))
9944 return fold_build1 (NEGATE_EXPR, type,
9945 fold_convert (type, TREE_OPERAND (arg0, 0)));
9947 /* ~X + X is -1. */
9948 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9949 && !TYPE_OVERFLOW_TRAPS (type))
9951 tree tem = TREE_OPERAND (arg0, 0);
9953 STRIP_NOPS (tem);
9954 if (operand_equal_p (tem, arg1, 0))
9956 t1 = build_int_cst_type (type, -1);
9957 return omit_one_operand (type, t1, arg1);
9961 /* X + ~X is -1. */
9962 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9963 && !TYPE_OVERFLOW_TRAPS (type))
9965 tree tem = TREE_OPERAND (arg1, 0);
9967 STRIP_NOPS (tem);
9968 if (operand_equal_p (arg0, tem, 0))
9970 t1 = build_int_cst_type (type, -1);
9971 return omit_one_operand (type, t1, arg0);
9975 /* X + (X / CST) * -CST is X % CST. */
9976 if (MULT_EXPR_P (arg1)
9977 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9978 && operand_equal_p (arg0,
9979 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9981 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9982 tree cst1 = TREE_OPERAND (arg1, 1);
9983 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9984 if (sum && integer_zerop (sum))
9985 return fold_convert (type,
9986 fold_build2 (TRUNC_MOD_EXPR,
9987 TREE_TYPE (arg0), arg0, cst0));
9991 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9992 same or one. Make sure type is not saturating.
9993 fold_plusminus_mult_expr will re-associate. */
9994 if ((MULT_EXPR_P (arg0)
9995 || MULT_EXPR_P (arg1))
9996 && !TYPE_SATURATING (type)
9997 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9999 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10000 if (tem)
10001 return tem;
10004 if (! FLOAT_TYPE_P (type))
10006 if (integer_zerop (arg1))
10007 return non_lvalue (fold_convert (type, arg0));
10009 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10010 with a constant, and the two constants have no bits in common,
10011 we should treat this as a BIT_IOR_EXPR since this may produce more
10012 simplifications. */
10013 if (TREE_CODE (arg0) == BIT_AND_EXPR
10014 && TREE_CODE (arg1) == BIT_AND_EXPR
10015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10016 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10017 && integer_zerop (const_binop (BIT_AND_EXPR,
10018 TREE_OPERAND (arg0, 1),
10019 TREE_OPERAND (arg1, 1), 0)))
10021 code = BIT_IOR_EXPR;
10022 goto bit_ior;
10025 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10026 (plus (plus (mult) (mult)) (foo)) so that we can
10027 take advantage of the factoring cases below. */
10028 if (((PLUS_EXPR_P (arg0) || MINUS_EXPR_P (arg0))
10029 && MULT_EXPR_P (arg1))
10030 || ((PLUS_EXPR_P (arg1) || MINUS_EXPR_P (arg1))
10031 && MULT_EXPR_P (arg0)))
10033 tree parg0, parg1, parg, marg;
10034 enum tree_code pcode;
10036 if (MULT_EXPR_P (arg1))
10037 parg = arg0, marg = arg1;
10038 else
10039 parg = arg1, marg = arg0;
10040 pcode = strip_nv (TREE_CODE (parg));
10041 parg0 = TREE_OPERAND (parg, 0);
10042 parg1 = TREE_OPERAND (parg, 1);
10043 STRIP_NOPS (parg0);
10044 STRIP_NOPS (parg1);
10046 if (MULT_EXPR_P (parg0)
10047 && !MULT_EXPR_P (parg1))
10048 return fold_build2 (pcode, type,
10049 fold_build2 (PLUS_EXPR, type,
10050 fold_convert (type, parg0),
10051 fold_convert (type, marg)),
10052 fold_convert (type, parg1));
10053 if (!MULT_EXPR_P (parg0)
10054 && MULT_EXPR_P (parg1))
10055 return fold_build2 (PLUS_EXPR, type,
10056 fold_convert (type, parg0),
10057 fold_build2 (pcode, type,
10058 fold_convert (type, marg),
10059 fold_convert (type,
10060 parg1)));
10063 else
10065 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10066 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10067 return non_lvalue (fold_convert (type, arg0));
10069 /* Likewise if the operands are reversed. */
10070 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10071 return non_lvalue (fold_convert (type, arg1));
10073 /* Convert X + -C into X - C. */
10074 if (TREE_CODE (arg1) == REAL_CST
10075 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10077 tem = fold_negate_const (arg1, type);
10078 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10079 return fold_build2 (MINUS_EXPR, type,
10080 fold_convert (type, arg0),
10081 fold_convert (type, tem));
10084 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10085 to __complex__ ( x, y ). This is not the same for SNaNs or
10086 if signed zeros are involved. */
10087 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10088 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10089 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10091 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10092 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10093 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10094 bool arg0rz = false, arg0iz = false;
10095 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10096 || (arg0i && (arg0iz = real_zerop (arg0i))))
10098 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10099 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10100 if (arg0rz && arg1i && real_zerop (arg1i))
10102 tree rp = arg1r ? arg1r
10103 : build1 (REALPART_EXPR, rtype, arg1);
10104 tree ip = arg0i ? arg0i
10105 : build1 (IMAGPART_EXPR, rtype, arg0);
10106 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10108 else if (arg0iz && arg1r && real_zerop (arg1r))
10110 tree rp = arg0r ? arg0r
10111 : build1 (REALPART_EXPR, rtype, arg0);
10112 tree ip = arg1i ? arg1i
10113 : build1 (IMAGPART_EXPR, rtype, arg1);
10114 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10119 if (flag_unsafe_math_optimizations
10120 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10121 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10122 && (tem = distribute_real_division (code, type, arg0, arg1)))
10123 return tem;
10125 /* Convert x+x into x*2.0. */
10126 if (operand_equal_p (arg0, arg1, 0)
10127 && SCALAR_FLOAT_TYPE_P (type))
10128 return fold_build2 (MULT_EXPR, type, arg0,
10129 build_real (type, dconst2));
10131 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10132 We associate floats only if the user has specified
10133 -fassociative-math. */
10134 if (flag_associative_math
10135 && TREE_CODE (arg1) == PLUS_EXPR
10136 && TREE_CODE (arg0) != MULT_EXPR)
10138 tree tree10 = TREE_OPERAND (arg1, 0);
10139 tree tree11 = TREE_OPERAND (arg1, 1);
10140 if (TREE_CODE (tree11) == MULT_EXPR
10141 && TREE_CODE (tree10) == MULT_EXPR)
10143 tree tree0;
10144 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10145 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10148 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10149 We associate floats only if the user has specified
10150 -fassociative-math. */
10151 if (flag_associative_math
10152 && TREE_CODE (arg0) == PLUS_EXPR
10153 && TREE_CODE (arg1) != MULT_EXPR)
10155 tree tree00 = TREE_OPERAND (arg0, 0);
10156 tree tree01 = TREE_OPERAND (arg0, 1);
10157 if (TREE_CODE (tree01) == MULT_EXPR
10158 && TREE_CODE (tree00) == MULT_EXPR)
10160 tree tree0;
10161 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10162 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10167 /* Convert (T1)X + (T1)Y to (T1)(X + Y) if (T1)X is only
10168 a sign-change. */
10169 if (TREE_CODE (type) == INTEGER_TYPE
10170 && CONVERT_EXPR_P (op0)
10171 && CONVERT_EXPR_P (op1)
10172 && (TREE_TYPE (TREE_OPERAND (op0, 0))
10173 == TREE_TYPE (TREE_OPERAND (op1, 0)))
10174 /* Do not expose arithmetic in Ada subtypes. */
10175 && !TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 0)))
10176 && TREE_CODE (TREE_TYPE (TREE_OPERAND (op0, 0))) == INTEGER_TYPE
10177 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op0, 0)))
10178 != TYPE_UNSIGNED (type))
10179 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))
10180 == TYPE_PRECISION (type)))
10181 return fold_convert (type,
10182 fold_build2 (strip_nv (code),
10183 TREE_TYPE (TREE_OPERAND (op0, 0)),
10184 TREE_OPERAND (op0, 0),
10185 TREE_OPERAND (op1, 0)));
10187 bit_rotate:
10188 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10189 is a rotate of A by C1 bits. */
10190 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10191 is a rotate of A by B bits. */
10193 enum tree_code code0, code1;
10194 tree rtype;
10195 code0 = TREE_CODE (arg0);
10196 code1 = TREE_CODE (arg1);
10197 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10198 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10199 && operand_equal_p (TREE_OPERAND (arg0, 0),
10200 TREE_OPERAND (arg1, 0), 0)
10201 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10202 TYPE_UNSIGNED (rtype))
10203 /* Only create rotates in complete modes. Other cases are not
10204 expanded properly. */
10205 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10207 tree tree01, tree11;
10208 enum tree_code code01, code11;
10210 tree01 = TREE_OPERAND (arg0, 1);
10211 tree11 = TREE_OPERAND (arg1, 1);
10212 STRIP_NOPS (tree01);
10213 STRIP_NOPS (tree11);
10214 code01 = TREE_CODE (tree01);
10215 code11 = TREE_CODE (tree11);
10216 if (code01 == INTEGER_CST
10217 && code11 == INTEGER_CST
10218 && TREE_INT_CST_HIGH (tree01) == 0
10219 && TREE_INT_CST_HIGH (tree11) == 0
10220 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10221 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10222 return fold_convert (type,
10223 build2 (LROTATE_EXPR,
10224 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10225 TREE_OPERAND (arg0, 0),
10226 code0 == LSHIFT_EXPR
10227 ? tree01 : tree11));
10228 else if (MINUS_EXPR_CODE_P (code11))
10230 tree tree110, tree111;
10231 tree110 = TREE_OPERAND (tree11, 0);
10232 tree111 = TREE_OPERAND (tree11, 1);
10233 STRIP_NOPS (tree110);
10234 STRIP_NOPS (tree111);
10235 if (TREE_CODE (tree110) == INTEGER_CST
10236 && 0 == compare_tree_int (tree110,
10237 TYPE_PRECISION
10238 (TREE_TYPE (TREE_OPERAND
10239 (arg0, 0))))
10240 && operand_equal_p (tree01, tree111, 0))
10241 return fold_convert (type,
10242 build2 ((code0 == LSHIFT_EXPR
10243 ? LROTATE_EXPR
10244 : RROTATE_EXPR),
10245 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10246 TREE_OPERAND (arg0, 0), tree01));
10248 else if (MINUS_EXPR_CODE_P (code01))
10250 tree tree010, tree011;
10251 tree010 = TREE_OPERAND (tree01, 0);
10252 tree011 = TREE_OPERAND (tree01, 1);
10253 STRIP_NOPS (tree010);
10254 STRIP_NOPS (tree011);
10255 if (TREE_CODE (tree010) == INTEGER_CST
10256 && 0 == compare_tree_int (tree010,
10257 TYPE_PRECISION
10258 (TREE_TYPE (TREE_OPERAND
10259 (arg0, 0))))
10260 && operand_equal_p (tree11, tree011, 0))
10261 return fold_convert (type,
10262 build2 ((code0 != LSHIFT_EXPR
10263 ? LROTATE_EXPR
10264 : RROTATE_EXPR),
10265 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10266 TREE_OPERAND (arg0, 0), tree11));
10271 associate:
10272 /* In most languages, can't associate operations on floats through
10273 parentheses. Rather than remember where the parentheses were, we
10274 don't associate floats at all, unless the user has specified
10275 -fassociative-math.
10276 And, we need to make sure type is not saturating. */
10278 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10279 && !TYPE_SATURATING (type))
10281 tree var0, con0, lit0, minus_lit0;
10282 tree var1, con1, lit1, minus_lit1;
10284 /* Split both trees into variables, constants, and literals. Then
10285 associate each group together, the constants with literals,
10286 then the result with variables. This increases the chances of
10287 literals being recombined later and of generating relocatable
10288 expressions for the sum of a constant and literal. */
10289 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10290 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10291 MINUS_EXPR_CODE_P (code));
10293 /* Only do something if we found more than two objects. Otherwise,
10294 nothing has changed and we risk infinite recursion. */
10295 if (2 < ((var0 != 0) + (var1 != 0)
10296 + (con0 != 0) + (con1 != 0)
10297 + (lit0 != 0) + (lit1 != 0)
10298 + (minus_lit0 != 0) + (minus_lit1 != 0)))
10300 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10301 if (MINUS_EXPR_CODE_P (code))
10302 code = PLUS_EXPR;
10304 var0 = associate_trees (var0, var1, code, type);
10305 con0 = associate_trees (con0, con1, code, type);
10306 lit0 = associate_trees (lit0, lit1, code, type);
10307 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10309 /* Preserve the MINUS_EXPR if the negative part of the literal is
10310 greater than the positive part. Otherwise, the multiplicative
10311 folding code (i.e extract_muldiv) may be fooled in case
10312 unsigned constants are subtracted, like in the following
10313 example: ((X*2 + 4) - 8U)/2. */
10314 if (minus_lit0 && lit0)
10316 if (TREE_CODE (lit0) == INTEGER_CST
10317 && TREE_CODE (minus_lit0) == INTEGER_CST
10318 && tree_int_cst_lt (lit0, minus_lit0))
10320 minus_lit0 = associate_trees (minus_lit0, lit0,
10321 MINUS_EXPR, type);
10322 lit0 = 0;
10324 else
10326 lit0 = associate_trees (lit0, minus_lit0,
10327 MINUS_EXPR, type);
10328 minus_lit0 = 0;
10331 if (minus_lit0)
10333 if (con0 == 0)
10334 return fold_convert (type,
10335 associate_trees (var0, minus_lit0,
10336 MINUS_EXPR, type));
10337 else
10339 con0 = associate_trees (con0, minus_lit0,
10340 MINUS_EXPR, type);
10341 return fold_convert (type,
10342 associate_trees (var0, con0,
10343 PLUS_EXPR, type));
10347 con0 = associate_trees (con0, lit0, code, type);
10348 return fold_convert (type, associate_trees (var0, con0,
10349 code, type));
10353 return NULL_TREE;
10355 case MINUS_EXPR:
10356 case MINUSNV_EXPR:
10357 /* Pointer simplifications for subtraction, simple reassociations. */
10358 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10360 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10361 if (POINTER_PLUS_EXPR_P (arg0)
10362 && POINTER_PLUS_EXPR_P (arg1))
10364 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10365 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10366 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10367 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10368 return fold_build2 (PLUS_EXPR, type,
10369 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10370 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10372 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming
10373 PTR0 - PTR1 simplifies. */
10374 else if (POINTER_PLUS_EXPR_P (arg0))
10376 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10377 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10378 tree tmp = fold_binary (MINUS_EXPR, type, arg00,
10379 fold_convert (type, arg1));
10380 if (tmp)
10381 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10385 /* A - (-B) -> A + B */
10386 if (NEGATE_EXPR_P (arg1))
10387 return fold_build2 (PLUS_EXPR, type, op0,
10388 fold_convert (type, TREE_OPERAND (arg1, 0)));
10389 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10390 if (NEGATE_EXPR_P (arg0)
10391 && (FLOAT_TYPE_P (type)
10392 || INTEGRAL_TYPE_P (type))
10393 && negate_expr_p (arg1)
10394 && reorder_operands_p (arg0, arg1))
10395 return fold_build2 (MINUS_EXPR, type,
10396 fold_convert (type, negate_expr (arg1)),
10397 fold_convert (type, TREE_OPERAND (arg0, 0)));
10398 /* Convert -A - 1 to ~A. */
10399 if (INTEGRAL_TYPE_P (type)
10400 && ((TREE_CODE (arg0) == NEGATE_EXPR
10401 && !TYPE_OVERFLOW_TRAPS (type))
10402 || TREE_CODE (arg0) == NEGATENV_EXPR)
10403 && integer_onep (arg1))
10404 return fold_build1 (BIT_NOT_EXPR, type,
10405 fold_convert (type, TREE_OPERAND (arg0, 0)));
10407 /* Convert -1 - A to ~A. */
10408 if (INTEGRAL_TYPE_P (type)
10409 && integer_all_onesp (arg0))
10410 return fold_build1 (BIT_NOT_EXPR, type, op1);
10413 /* X - (X / CST) * CST is X % CST. */
10414 if (INTEGRAL_TYPE_P (type)
10415 && MULT_EXPR_P (arg1)
10416 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10417 && operand_equal_p (arg0,
10418 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10419 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10420 TREE_OPERAND (arg1, 1), 0))
10421 return fold_convert (type,
10422 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10423 arg0, TREE_OPERAND (arg1, 1)));
10425 if (! FLOAT_TYPE_P (type))
10427 if (integer_zerop (arg0))
10428 return negate_expr (fold_convert (type, arg1));
10429 if (integer_zerop (arg1))
10430 return non_lvalue (fold_convert (type, arg0));
10432 /* Fold A - (A & B) into ~B & A. */
10433 if (!TREE_SIDE_EFFECTS (arg0)
10434 && TREE_CODE (arg1) == BIT_AND_EXPR)
10436 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10438 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10439 return fold_build2 (BIT_AND_EXPR, type,
10440 fold_build1 (BIT_NOT_EXPR, type, arg10),
10441 fold_convert (type, arg0));
10443 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10445 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10446 return fold_build2 (BIT_AND_EXPR, type,
10447 fold_build1 (BIT_NOT_EXPR, type, arg11),
10448 fold_convert (type, arg0));
10452 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10453 any power of 2 minus 1. */
10454 if (TREE_CODE (arg0) == BIT_AND_EXPR
10455 && TREE_CODE (arg1) == BIT_AND_EXPR
10456 && operand_equal_p (TREE_OPERAND (arg0, 0),
10457 TREE_OPERAND (arg1, 0), 0))
10459 tree mask0 = TREE_OPERAND (arg0, 1);
10460 tree mask1 = TREE_OPERAND (arg1, 1);
10461 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10463 if (operand_equal_p (tem, mask1, 0))
10465 tem = fold_build2 (BIT_XOR_EXPR, type,
10466 TREE_OPERAND (arg0, 0), mask1);
10467 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10472 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10473 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10474 return non_lvalue (fold_convert (type, arg0));
10476 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10477 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10478 (-ARG1 + ARG0) reduces to -ARG1. */
10479 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10480 return negate_expr (fold_convert (type, arg1));
10482 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10483 __complex__ ( x, -y ). This is not the same for SNaNs or if
10484 signed zeros are involved. */
10485 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10486 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10487 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10489 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10490 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10491 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10492 bool arg0rz = false, arg0iz = false;
10493 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10494 || (arg0i && (arg0iz = real_zerop (arg0i))))
10496 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10497 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10498 if (arg0rz && arg1i && real_zerop (arg1i))
10500 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10501 arg1r ? arg1r
10502 : build1 (REALPART_EXPR, rtype, arg1));
10503 tree ip = arg0i ? arg0i
10504 : build1 (IMAGPART_EXPR, rtype, arg0);
10505 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10507 else if (arg0iz && arg1r && real_zerop (arg1r))
10509 tree rp = arg0r ? arg0r
10510 : build1 (REALPART_EXPR, rtype, arg0);
10511 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10512 arg1i ? arg1i
10513 : build1 (IMAGPART_EXPR, rtype, arg1));
10514 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10519 /* Fold &x - &x. This can happen from &x.foo - &x.
10520 This is unsafe for certain floats even in non-IEEE formats.
10521 In IEEE, it is unsafe because it does wrong for NaNs.
10522 Also note that operand_equal_p is always false if an operand
10523 is volatile. */
10525 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10526 && operand_equal_p (arg0, arg1, 0))
10527 return fold_convert (type, integer_zero_node);
10529 /* A - B -> A + (-B) if B is easily negatable. */
10530 if ((negate_expr_p (arg1)
10531 /* Avoid negating constants if that would change overflow
10532 behavior. */
10533 && (code == MINUS_EXPR
10534 || TREE_CODE (arg1) != INTEGER_CST
10535 || may_negate_without_overflow_p (arg1)))
10536 && ((FLOAT_TYPE_P (type)
10537 /* Avoid this transformation if B is a positive REAL_CST. */
10538 && (TREE_CODE (arg1) != REAL_CST
10539 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10540 || INTEGRAL_TYPE_P (type)))
10542 enum tree_code ncode = PLUS_EXPR;
10543 /* If the original subtraction is signed and did not overflow
10544 so does the new addition if the negation of arg1 does not
10545 overflow (which we know for constants only). */
10546 if (code == MINUSNV_EXPR
10547 && TREE_CODE (arg1) == INTEGER_CST
10548 && !TYPE_UNSIGNED (type))
10549 ncode = PLUSNV_EXPR;
10550 return fold_build2 (ncode, type,
10551 fold_convert (type, arg0),
10552 fold_convert (type, negate_expr (arg1)));
10555 /* Try folding difference of addresses. */
10557 HOST_WIDE_INT diff;
10559 if ((TREE_CODE (arg0) == ADDR_EXPR
10560 || TREE_CODE (arg1) == ADDR_EXPR)
10561 && ptr_difference_const (arg0, arg1, &diff))
10562 return build_int_cst_type (type, diff);
10565 /* Fold &a[i] - &a[j] to i-j. */
10566 if (TREE_CODE (arg0) == ADDR_EXPR
10567 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10568 && TREE_CODE (arg1) == ADDR_EXPR
10569 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10571 tree aref0 = TREE_OPERAND (arg0, 0);
10572 tree aref1 = TREE_OPERAND (arg1, 0);
10573 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10574 TREE_OPERAND (aref1, 0), 0))
10576 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10577 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10578 tree esz = array_ref_element_size (aref0);
10579 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10580 return fold_build2 (MULT_EXPR, type, diff,
10581 fold_convert (type, esz));
10586 if (FLOAT_TYPE_P (type)
10587 && flag_unsafe_math_optimizations
10588 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10589 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10590 && (tem = distribute_real_division (code, type, arg0, arg1)))
10591 return tem;
10593 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10594 same or one. Make sure type is not saturating.
10595 fold_plusminus_mult_expr will re-associate. */
10596 if ((MULT_EXPR_P (arg0)
10597 || MULT_EXPR_P (arg1))
10598 && !TYPE_SATURATING (type)
10599 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10601 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10602 if (tem)
10603 return tem;
10606 /* Convert (T1)X - (T1)Y to (T1)(X - Y) if (T1)X is only
10607 a sign-change. */
10608 if (TREE_CODE (type) == INTEGER_TYPE
10609 && CONVERT_EXPR_P (op0)
10610 && CONVERT_EXPR_P (op1)
10611 && (TREE_TYPE (TREE_OPERAND (op0, 0))
10612 == TREE_TYPE (TREE_OPERAND (op1, 0)))
10613 /* Do not expose arithmetic in Ada subtypes. */
10614 && !TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 0)))
10615 && TREE_CODE (TREE_TYPE (TREE_OPERAND (op0, 0))) == INTEGER_TYPE
10616 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op0, 0)))
10617 != TYPE_UNSIGNED (type))
10618 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))
10619 == TYPE_PRECISION (type)))
10620 return fold_convert (type,
10621 fold_build2 (strip_nv (code),
10622 TREE_TYPE (TREE_OPERAND (op0, 0)),
10623 TREE_OPERAND (op0, 0),
10624 TREE_OPERAND (op1, 0)));
10626 goto associate;
10628 case MULT_EXPR:
10629 case MULTNV_EXPR:
10630 if (! FLOAT_TYPE_P (type))
10632 if (integer_zerop (arg1))
10633 return omit_one_operand (type, arg1, arg0);
10634 if (integer_onep (arg1))
10635 return non_lvalue (fold_convert (type, arg0));
10636 /* Transform x * -1 into -x. Make sure to do the negation
10637 on the original operand with conversions not stripped
10638 because we can only strip non-sign-changing conversions. */
10639 if (integer_all_onesp (arg1))
10640 return fold_convert (type, negate_expr (op0));
10643 /* (-A) * (-B) -> A * B. */
10644 if (NEGATE_EXPR_P (arg0) && negate_expr_p (arg1))
10645 return fold_build2 (MULT_EXPR, type,
10646 fold_convert (type, TREE_OPERAND (arg0, 0)),
10647 fold_convert (type, negate_expr (arg1)));
10648 if (NEGATE_EXPR_P (arg1) && negate_expr_p (arg0))
10649 return fold_build2 (MULT_EXPR, type,
10650 fold_convert (type, negate_expr (arg0)),
10651 fold_convert (type, TREE_OPERAND (arg1, 0)));
10653 if (! FLOAT_TYPE_P (type))
10655 /* Transform x * -C into -x * C if x is easily negatable. */
10656 if (TREE_CODE (arg1) == INTEGER_CST
10657 && tree_int_cst_sgn (arg1) == -1
10658 && negate_expr_p (arg0)
10659 && (tem = negate_expr (arg1)) != arg1
10660 && !TREE_OVERFLOW (tem))
10661 return fold_build2 (MULT_EXPR, type,
10662 fold_convert (type, negate_expr (arg0)), tem);
10664 /* (a * (1 << b)) is (a << b) */
10665 if (TREE_CODE (arg1) == LSHIFT_EXPR
10666 && integer_onep (TREE_OPERAND (arg1, 0)))
10667 return fold_build2 (LSHIFT_EXPR, type, op0,
10668 TREE_OPERAND (arg1, 1));
10669 if (TREE_CODE (arg0) == LSHIFT_EXPR
10670 && integer_onep (TREE_OPERAND (arg0, 0)))
10671 return fold_build2 (LSHIFT_EXPR, type, op1,
10672 TREE_OPERAND (arg0, 1));
10674 /* (A + A) * C -> A * 2 * C */
10675 if (PLUS_EXPR_P (arg0)
10676 && TREE_CODE (arg1) == INTEGER_CST
10677 && operand_equal_p (TREE_OPERAND (arg0, 0),
10678 TREE_OPERAND (arg0, 1), 0))
10679 return fold_build2 (MULT_EXPR, type,
10680 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10681 TREE_OPERAND (arg0, 1)),
10682 fold_build2 (MULT_EXPR, type,
10683 build_int_cst (type, 2) , arg1));
10685 strict_overflow_p = false;
10686 if (TREE_CODE (arg1) == INTEGER_CST
10687 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10688 &strict_overflow_p)))
10690 if (strict_overflow_p)
10691 fold_overflow_warning (("assuming signed overflow does not "
10692 "occur when simplifying "
10693 "multiplication"),
10694 WARN_STRICT_OVERFLOW_MISC);
10695 return fold_convert (type, tem);
10698 /* Optimize z * conj(z) for integer complex numbers. */
10699 if (TREE_CODE (arg0) == CONJ_EXPR
10700 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10701 return fold_mult_zconjz (type, arg1);
10702 if (TREE_CODE (arg1) == CONJ_EXPR
10703 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10704 return fold_mult_zconjz (type, arg0);
10706 else
10708 /* Maybe fold x * 0 to 0. The expressions aren't the same
10709 when x is NaN, since x * 0 is also NaN. Nor are they the
10710 same in modes with signed zeros, since multiplying a
10711 negative value by 0 gives -0, not +0. */
10712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10713 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10714 && real_zerop (arg1))
10715 return omit_one_operand (type, arg1, arg0);
10716 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10717 Likewise for complex arithmetic with signed zeros. */
10718 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10719 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10720 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10721 && real_onep (arg1))
10722 return non_lvalue (fold_convert (type, arg0));
10724 /* Transform x * -1.0 into -x. */
10725 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10726 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10727 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10728 && real_minus_onep (arg1))
10729 return fold_convert (type, negate_expr (arg0));
10731 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10732 the result for floating point types due to rounding so it is applied
10733 only if -fassociative-math was specify. */
10734 if (flag_associative_math
10735 && TREE_CODE (arg0) == RDIV_EXPR
10736 && TREE_CODE (arg1) == REAL_CST
10737 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10739 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10740 arg1, 0);
10741 if (tem)
10742 return fold_build2 (RDIV_EXPR, type, tem,
10743 TREE_OPERAND (arg0, 1));
10746 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10747 if (operand_equal_p (arg0, arg1, 0))
10749 tree tem = fold_strip_sign_ops (arg0);
10750 if (tem != NULL_TREE)
10752 tem = fold_convert (type, tem);
10753 return fold_build2 (MULT_EXPR, type, tem, tem);
10757 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10758 This is not the same for NaNs or if signed zeros are
10759 involved. */
10760 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10761 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10762 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10763 && TREE_CODE (arg1) == COMPLEX_CST
10764 && real_zerop (TREE_REALPART (arg1)))
10766 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10767 if (real_onep (TREE_IMAGPART (arg1)))
10768 return fold_build2 (COMPLEX_EXPR, type,
10769 negate_expr (fold_build1 (IMAGPART_EXPR,
10770 rtype, arg0)),
10771 fold_build1 (REALPART_EXPR, rtype, arg0));
10772 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10773 return fold_build2 (COMPLEX_EXPR, type,
10774 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10775 negate_expr (fold_build1 (REALPART_EXPR,
10776 rtype, arg0)));
10779 /* Optimize z * conj(z) for floating point complex numbers.
10780 Guarded by flag_unsafe_math_optimizations as non-finite
10781 imaginary components don't produce scalar results. */
10782 if (flag_unsafe_math_optimizations
10783 && TREE_CODE (arg0) == CONJ_EXPR
10784 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10785 return fold_mult_zconjz (type, arg1);
10786 if (flag_unsafe_math_optimizations
10787 && TREE_CODE (arg1) == CONJ_EXPR
10788 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10789 return fold_mult_zconjz (type, arg0);
10791 if (flag_unsafe_math_optimizations)
10793 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10794 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10796 /* Optimizations of root(...)*root(...). */
10797 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10799 tree rootfn, arg;
10800 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10801 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10803 /* Optimize sqrt(x)*sqrt(x) as x. */
10804 if (BUILTIN_SQRT_P (fcode0)
10805 && operand_equal_p (arg00, arg10, 0)
10806 && ! HONOR_SNANS (TYPE_MODE (type)))
10807 return arg00;
10809 /* Optimize root(x)*root(y) as root(x*y). */
10810 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10811 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10812 return build_call_expr (rootfn, 1, arg);
10815 /* Optimize expN(x)*expN(y) as expN(x+y). */
10816 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10818 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10819 tree arg = fold_build2 (PLUS_EXPR, type,
10820 CALL_EXPR_ARG (arg0, 0),
10821 CALL_EXPR_ARG (arg1, 0));
10822 return build_call_expr (expfn, 1, arg);
10825 /* Optimizations of pow(...)*pow(...). */
10826 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10827 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10828 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10830 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10831 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10832 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10833 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10835 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10836 if (operand_equal_p (arg01, arg11, 0))
10838 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10839 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10840 return build_call_expr (powfn, 2, arg, arg01);
10843 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10844 if (operand_equal_p (arg00, arg10, 0))
10846 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10847 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10848 return build_call_expr (powfn, 2, arg00, arg);
10852 /* Optimize tan(x)*cos(x) as sin(x). */
10853 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10854 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10855 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10856 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10857 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10858 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10859 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10860 CALL_EXPR_ARG (arg1, 0), 0))
10862 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10864 if (sinfn != NULL_TREE)
10865 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10868 /* Optimize x*pow(x,c) as pow(x,c+1). */
10869 if (fcode1 == BUILT_IN_POW
10870 || fcode1 == BUILT_IN_POWF
10871 || fcode1 == BUILT_IN_POWL)
10873 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10874 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10875 if (TREE_CODE (arg11) == REAL_CST
10876 && !TREE_OVERFLOW (arg11)
10877 && operand_equal_p (arg0, arg10, 0))
10879 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10880 REAL_VALUE_TYPE c;
10881 tree arg;
10883 c = TREE_REAL_CST (arg11);
10884 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10885 arg = build_real (type, c);
10886 return build_call_expr (powfn, 2, arg0, arg);
10890 /* Optimize pow(x,c)*x as pow(x,c+1). */
10891 if (fcode0 == BUILT_IN_POW
10892 || fcode0 == BUILT_IN_POWF
10893 || fcode0 == BUILT_IN_POWL)
10895 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10896 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10897 if (TREE_CODE (arg01) == REAL_CST
10898 && !TREE_OVERFLOW (arg01)
10899 && operand_equal_p (arg1, arg00, 0))
10901 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10902 REAL_VALUE_TYPE c;
10903 tree arg;
10905 c = TREE_REAL_CST (arg01);
10906 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10907 arg = build_real (type, c);
10908 return build_call_expr (powfn, 2, arg1, arg);
10912 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10913 if (optimize_function_for_speed_p (cfun)
10914 && operand_equal_p (arg0, arg1, 0))
10916 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10918 if (powfn)
10920 tree arg = build_real (type, dconst2);
10921 return build_call_expr (powfn, 2, arg0, arg);
10926 goto associate;
10928 case BIT_IOR_EXPR:
10929 bit_ior:
10930 if (integer_all_onesp (arg1))
10931 return omit_one_operand (type, arg1, arg0);
10932 if (integer_zerop (arg1))
10933 return non_lvalue (fold_convert (type, arg0));
10934 if (operand_equal_p (arg0, arg1, 0))
10935 return non_lvalue (fold_convert (type, arg0));
10937 /* ~X | X is -1. */
10938 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10939 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10941 t1 = fold_convert (type, integer_zero_node);
10942 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10943 return omit_one_operand (type, t1, arg1);
10946 /* X | ~X is -1. */
10947 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10950 t1 = fold_convert (type, integer_zero_node);
10951 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10952 return omit_one_operand (type, t1, arg0);
10955 /* Canonicalize (X & C1) | C2. */
10956 if (TREE_CODE (arg0) == BIT_AND_EXPR
10957 && TREE_CODE (arg1) == INTEGER_CST
10958 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10960 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10961 int width = TYPE_PRECISION (type), w;
10962 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10963 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10964 hi2 = TREE_INT_CST_HIGH (arg1);
10965 lo2 = TREE_INT_CST_LOW (arg1);
10967 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10968 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10969 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10971 if (width > HOST_BITS_PER_WIDE_INT)
10973 mhi = (unsigned HOST_WIDE_INT) -1
10974 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10975 mlo = -1;
10977 else
10979 mhi = 0;
10980 mlo = (unsigned HOST_WIDE_INT) -1
10981 >> (HOST_BITS_PER_WIDE_INT - width);
10984 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10985 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10986 return fold_build2 (BIT_IOR_EXPR, type,
10987 TREE_OPERAND (arg0, 0), arg1);
10989 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10990 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10991 mode which allows further optimizations. */
10992 hi1 &= mhi;
10993 lo1 &= mlo;
10994 hi2 &= mhi;
10995 lo2 &= mlo;
10996 hi3 = hi1 & ~hi2;
10997 lo3 = lo1 & ~lo2;
10998 for (w = BITS_PER_UNIT;
10999 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11000 w <<= 1)
11002 unsigned HOST_WIDE_INT mask
11003 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11004 if (((lo1 | lo2) & mask) == mask
11005 && (lo1 & ~mask) == 0 && hi1 == 0)
11007 hi3 = 0;
11008 lo3 = mask;
11009 break;
11012 if (hi3 != hi1 || lo3 != lo1)
11013 return fold_build2 (BIT_IOR_EXPR, type,
11014 fold_build2 (BIT_AND_EXPR, type,
11015 TREE_OPERAND (arg0, 0),
11016 build_int_cst_wide (type,
11017 lo3, hi3)),
11018 arg1);
11021 /* (X & Y) | Y is (X, Y). */
11022 if (TREE_CODE (arg0) == BIT_AND_EXPR
11023 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11024 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11025 /* (X & Y) | X is (Y, X). */
11026 if (TREE_CODE (arg0) == BIT_AND_EXPR
11027 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11028 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11029 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11030 /* X | (X & Y) is (Y, X). */
11031 if (TREE_CODE (arg1) == BIT_AND_EXPR
11032 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11033 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11034 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11035 /* X | (Y & X) is (Y, X). */
11036 if (TREE_CODE (arg1) == BIT_AND_EXPR
11037 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11038 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11039 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11041 t1 = distribute_bit_expr (code, type, arg0, arg1);
11042 if (t1 != NULL_TREE)
11043 return t1;
11045 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11047 This results in more efficient code for machines without a NAND
11048 instruction. Combine will canonicalize to the first form
11049 which will allow use of NAND instructions provided by the
11050 backend if they exist. */
11051 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11052 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11054 return fold_build1 (BIT_NOT_EXPR, type,
11055 build2 (BIT_AND_EXPR, type,
11056 fold_convert (type,
11057 TREE_OPERAND (arg0, 0)),
11058 fold_convert (type,
11059 TREE_OPERAND (arg1, 0))));
11062 /* See if this can be simplified into a rotate first. If that
11063 is unsuccessful continue in the association code. */
11064 goto bit_rotate;
11066 case BIT_XOR_EXPR:
11067 if (integer_zerop (arg1))
11068 return non_lvalue (fold_convert (type, arg0));
11069 if (integer_all_onesp (arg1))
11070 return fold_build1 (BIT_NOT_EXPR, type, op0);
11071 if (operand_equal_p (arg0, arg1, 0))
11072 return omit_one_operand (type, integer_zero_node, arg0);
11074 /* ~X ^ X is -1. */
11075 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11076 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11078 t1 = fold_convert (type, integer_zero_node);
11079 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11080 return omit_one_operand (type, t1, arg1);
11083 /* X ^ ~X is -1. */
11084 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11085 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11087 t1 = fold_convert (type, integer_zero_node);
11088 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11089 return omit_one_operand (type, t1, arg0);
11092 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11093 with a constant, and the two constants have no bits in common,
11094 we should treat this as a BIT_IOR_EXPR since this may produce more
11095 simplifications. */
11096 if (TREE_CODE (arg0) == BIT_AND_EXPR
11097 && TREE_CODE (arg1) == BIT_AND_EXPR
11098 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11099 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11100 && integer_zerop (const_binop (BIT_AND_EXPR,
11101 TREE_OPERAND (arg0, 1),
11102 TREE_OPERAND (arg1, 1), 0)))
11104 code = BIT_IOR_EXPR;
11105 goto bit_ior;
11108 /* (X | Y) ^ X -> Y & ~ X*/
11109 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11110 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11112 tree t2 = TREE_OPERAND (arg0, 1);
11113 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11114 arg1);
11115 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11116 fold_convert (type, t1));
11117 return t1;
11120 /* (Y | X) ^ X -> Y & ~ X*/
11121 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11122 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11124 tree t2 = TREE_OPERAND (arg0, 0);
11125 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11126 arg1);
11127 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11128 fold_convert (type, t1));
11129 return t1;
11132 /* X ^ (X | Y) -> Y & ~ X*/
11133 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11134 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11136 tree t2 = TREE_OPERAND (arg1, 1);
11137 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11138 arg0);
11139 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11140 fold_convert (type, t1));
11141 return t1;
11144 /* X ^ (Y | X) -> Y & ~ X*/
11145 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11146 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11148 tree t2 = TREE_OPERAND (arg1, 0);
11149 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11150 arg0);
11151 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11152 fold_convert (type, t1));
11153 return t1;
11156 /* Convert ~X ^ ~Y to X ^ Y. */
11157 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11158 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11159 return fold_build2 (code, type,
11160 fold_convert (type, TREE_OPERAND (arg0, 0)),
11161 fold_convert (type, TREE_OPERAND (arg1, 0)));
11163 /* Convert ~X ^ C to X ^ ~C. */
11164 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11165 && TREE_CODE (arg1) == INTEGER_CST)
11166 return fold_build2 (code, type,
11167 fold_convert (type, TREE_OPERAND (arg0, 0)),
11168 fold_build1 (BIT_NOT_EXPR, type, arg1));
11170 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11171 if (TREE_CODE (arg0) == BIT_AND_EXPR
11172 && integer_onep (TREE_OPERAND (arg0, 1))
11173 && integer_onep (arg1))
11174 return fold_build2 (EQ_EXPR, type, arg0,
11175 build_int_cst (TREE_TYPE (arg0), 0));
11177 /* Fold (X & Y) ^ Y as ~X & Y. */
11178 if (TREE_CODE (arg0) == BIT_AND_EXPR
11179 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11181 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11182 return fold_build2 (BIT_AND_EXPR, type,
11183 fold_build1 (BIT_NOT_EXPR, type, tem),
11184 fold_convert (type, arg1));
11186 /* Fold (X & Y) ^ X as ~Y & X. */
11187 if (TREE_CODE (arg0) == BIT_AND_EXPR
11188 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11189 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11191 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11192 return fold_build2 (BIT_AND_EXPR, type,
11193 fold_build1 (BIT_NOT_EXPR, type, tem),
11194 fold_convert (type, arg1));
11196 /* Fold X ^ (X & Y) as X & ~Y. */
11197 if (TREE_CODE (arg1) == BIT_AND_EXPR
11198 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11200 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11201 return fold_build2 (BIT_AND_EXPR, type,
11202 fold_convert (type, arg0),
11203 fold_build1 (BIT_NOT_EXPR, type, tem));
11205 /* Fold X ^ (Y & X) as ~Y & X. */
11206 if (TREE_CODE (arg1) == BIT_AND_EXPR
11207 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11208 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11210 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11211 return fold_build2 (BIT_AND_EXPR, type,
11212 fold_build1 (BIT_NOT_EXPR, type, tem),
11213 fold_convert (type, arg0));
11216 /* See if this can be simplified into a rotate first. If that
11217 is unsuccessful continue in the association code. */
11218 goto bit_rotate;
11220 case BIT_AND_EXPR:
11221 if (integer_all_onesp (arg1))
11222 return non_lvalue (fold_convert (type, arg0));
11223 if (integer_zerop (arg1))
11224 return omit_one_operand (type, arg1, arg0);
11225 if (operand_equal_p (arg0, arg1, 0))
11226 return non_lvalue (fold_convert (type, arg0));
11228 /* ~X & X is always zero. */
11229 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11230 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11231 return omit_one_operand (type, integer_zero_node, arg1);
11233 /* X & ~X is always zero. */
11234 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11235 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11236 return omit_one_operand (type, integer_zero_node, arg0);
11238 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11239 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11240 && TREE_CODE (arg1) == INTEGER_CST
11241 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11243 tree tmp1 = fold_convert (type, arg1);
11244 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11245 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11246 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11247 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11248 return fold_convert (type,
11249 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11252 /* (X | Y) & Y is (X, Y). */
11253 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11254 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11255 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11256 /* (X | Y) & X is (Y, X). */
11257 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11258 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11259 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11260 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11261 /* X & (X | Y) is (Y, X). */
11262 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11263 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11264 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11265 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11266 /* X & (Y | X) is (Y, X). */
11267 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11268 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11269 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11270 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11272 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11273 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11274 && integer_onep (TREE_OPERAND (arg0, 1))
11275 && integer_onep (arg1))
11277 tem = TREE_OPERAND (arg0, 0);
11278 return fold_build2 (EQ_EXPR, type,
11279 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11280 build_int_cst (TREE_TYPE (tem), 1)),
11281 build_int_cst (TREE_TYPE (tem), 0));
11283 /* Fold ~X & 1 as (X & 1) == 0. */
11284 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11285 && integer_onep (arg1))
11287 tem = TREE_OPERAND (arg0, 0);
11288 return fold_build2 (EQ_EXPR, type,
11289 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11290 build_int_cst (TREE_TYPE (tem), 1)),
11291 build_int_cst (TREE_TYPE (tem), 0));
11294 /* Fold (X ^ Y) & Y as ~X & Y. */
11295 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11296 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11298 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11299 return fold_build2 (BIT_AND_EXPR, type,
11300 fold_build1 (BIT_NOT_EXPR, type, tem),
11301 fold_convert (type, arg1));
11303 /* Fold (X ^ Y) & X as ~Y & X. */
11304 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11305 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11306 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11308 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11309 return fold_build2 (BIT_AND_EXPR, type,
11310 fold_build1 (BIT_NOT_EXPR, type, tem),
11311 fold_convert (type, arg1));
11313 /* Fold X & (X ^ Y) as X & ~Y. */
11314 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11315 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11317 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11318 return fold_build2 (BIT_AND_EXPR, type,
11319 fold_convert (type, arg0),
11320 fold_build1 (BIT_NOT_EXPR, type, tem));
11322 /* Fold X & (Y ^ X) as ~Y & X. */
11323 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11324 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11325 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11327 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11328 return fold_build2 (BIT_AND_EXPR, type,
11329 fold_build1 (BIT_NOT_EXPR, type, tem),
11330 fold_convert (type, arg0));
11333 t1 = distribute_bit_expr (code, type, arg0, arg1);
11334 if (t1 != NULL_TREE)
11335 return t1;
11336 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11337 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11338 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11340 unsigned int prec
11341 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11343 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11344 && (~TREE_INT_CST_LOW (arg1)
11345 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11346 return fold_convert (type, TREE_OPERAND (arg0, 0));
11349 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11351 This results in more efficient code for machines without a NOR
11352 instruction. Combine will canonicalize to the first form
11353 which will allow use of NOR instructions provided by the
11354 backend if they exist. */
11355 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11356 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11358 return fold_build1 (BIT_NOT_EXPR, type,
11359 build2 (BIT_IOR_EXPR, type,
11360 fold_convert (type,
11361 TREE_OPERAND (arg0, 0)),
11362 fold_convert (type,
11363 TREE_OPERAND (arg1, 0))));
11366 /* If arg0 is derived from the address of an object or function, we may
11367 be able to fold this expression using the object or function's
11368 alignment. */
11369 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11371 unsigned HOST_WIDE_INT modulus, residue;
11372 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11374 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11375 integer_onep (arg1));
11377 /* This works because modulus is a power of 2. If this weren't the
11378 case, we'd have to replace it by its greatest power-of-2
11379 divisor: modulus & -modulus. */
11380 if (low < modulus)
11381 return build_int_cst (type, residue & low);
11384 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11385 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11386 if the new mask might be further optimized. */
11387 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11388 || TREE_CODE (arg0) == RSHIFT_EXPR)
11389 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11390 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11391 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11392 < TYPE_PRECISION (TREE_TYPE (arg0))
11393 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11394 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11396 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11397 unsigned HOST_WIDE_INT mask
11398 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11399 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11400 tree shift_type = TREE_TYPE (arg0);
11402 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11403 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11404 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11405 && TYPE_PRECISION (TREE_TYPE (arg0))
11406 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11408 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11409 tree arg00 = TREE_OPERAND (arg0, 0);
11410 /* See if more bits can be proven as zero because of
11411 zero extension. */
11412 if (TREE_CODE (arg00) == NOP_EXPR
11413 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11415 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11416 if (TYPE_PRECISION (inner_type)
11417 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11418 && TYPE_PRECISION (inner_type) < prec)
11420 prec = TYPE_PRECISION (inner_type);
11421 /* See if we can shorten the right shift. */
11422 if (shiftc < prec)
11423 shift_type = inner_type;
11426 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11427 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11428 zerobits <<= prec - shiftc;
11429 /* For arithmetic shift if sign bit could be set, zerobits
11430 can contain actually sign bits, so no transformation is
11431 possible, unless MASK masks them all away. In that
11432 case the shift needs to be converted into logical shift. */
11433 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11434 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11436 if ((mask & zerobits) == 0)
11437 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11438 else
11439 zerobits = 0;
11443 /* ((X << 16) & 0xff00) is (X, 0). */
11444 if ((mask & zerobits) == mask)
11445 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11447 newmask = mask | zerobits;
11448 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11450 unsigned int prec;
11452 /* Only do the transformation if NEWMASK is some integer
11453 mode's mask. */
11454 for (prec = BITS_PER_UNIT;
11455 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11456 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11457 break;
11458 if (prec < HOST_BITS_PER_WIDE_INT
11459 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11461 tree newmaskt;
11463 if (shift_type != TREE_TYPE (arg0))
11465 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11466 fold_convert (shift_type,
11467 TREE_OPERAND (arg0, 0)),
11468 TREE_OPERAND (arg0, 1));
11469 tem = fold_convert (type, tem);
11471 else
11472 tem = op0;
11473 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11474 if (!tree_int_cst_equal (newmaskt, arg1))
11475 return fold_build2 (BIT_AND_EXPR, type, tem, newmaskt);
11480 goto associate;
11482 case RDIV_EXPR:
11483 /* Don't touch a floating-point divide by zero unless the mode
11484 of the constant can represent infinity. */
11485 if (TREE_CODE (arg1) == REAL_CST
11486 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11487 && real_zerop (arg1))
11488 return NULL_TREE;
11490 /* Optimize A / A to 1.0 if we don't care about
11491 NaNs or Infinities. Skip the transformation
11492 for non-real operands. */
11493 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11494 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11495 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11496 && operand_equal_p (arg0, arg1, 0))
11498 tree r = build_real (TREE_TYPE (arg0), dconst1);
11500 return omit_two_operands (type, r, arg0, arg1);
11503 /* The complex version of the above A / A optimization. */
11504 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11505 && operand_equal_p (arg0, arg1, 0))
11507 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11508 if (! HONOR_NANS (TYPE_MODE (elem_type))
11509 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11511 tree r = build_real (elem_type, dconst1);
11512 /* omit_two_operands will call fold_convert for us. */
11513 return omit_two_operands (type, r, arg0, arg1);
11517 /* (-A) / (-B) -> A / B */
11518 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11519 return fold_build2 (RDIV_EXPR, type,
11520 TREE_OPERAND (arg0, 0),
11521 negate_expr (arg1));
11522 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11523 return fold_build2 (RDIV_EXPR, type,
11524 negate_expr (arg0),
11525 TREE_OPERAND (arg1, 0));
11527 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11528 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11529 && real_onep (arg1))
11530 return non_lvalue (fold_convert (type, arg0));
11532 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11533 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11534 && real_minus_onep (arg1))
11535 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11537 /* If ARG1 is a constant, we can convert this to a multiply by the
11538 reciprocal. This does not have the same rounding properties,
11539 so only do this if -freciprocal-math. We can actually
11540 always safely do it if ARG1 is a power of two, but it's hard to
11541 tell if it is or not in a portable manner. */
11542 if (TREE_CODE (arg1) == REAL_CST)
11544 if (flag_reciprocal_math
11545 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11546 arg1, 0)))
11547 return fold_build2 (MULT_EXPR, type, arg0, tem);
11548 /* Find the reciprocal if optimizing and the result is exact. */
11549 if (optimize)
11551 REAL_VALUE_TYPE r;
11552 r = TREE_REAL_CST (arg1);
11553 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11555 tem = build_real (type, r);
11556 return fold_build2 (MULT_EXPR, type,
11557 fold_convert (type, arg0), tem);
11561 /* Convert A/B/C to A/(B*C). */
11562 if (flag_reciprocal_math
11563 && TREE_CODE (arg0) == RDIV_EXPR)
11564 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11565 fold_build2 (MULT_EXPR, type,
11566 TREE_OPERAND (arg0, 1), arg1));
11568 /* Convert A/(B/C) to (A/B)*C. */
11569 if (flag_reciprocal_math
11570 && TREE_CODE (arg1) == RDIV_EXPR)
11571 return fold_build2 (MULT_EXPR, type,
11572 fold_build2 (RDIV_EXPR, type, arg0,
11573 TREE_OPERAND (arg1, 0)),
11574 TREE_OPERAND (arg1, 1));
11576 /* Convert C1/(X*C2) into (C1/C2)/X. */
11577 if (flag_reciprocal_math
11578 && TREE_CODE (arg1) == MULT_EXPR
11579 && TREE_CODE (arg0) == REAL_CST
11580 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11582 tree tem = const_binop (RDIV_EXPR, arg0,
11583 TREE_OPERAND (arg1, 1), 0);
11584 if (tem)
11585 return fold_build2 (RDIV_EXPR, type, tem,
11586 TREE_OPERAND (arg1, 0));
11589 if (flag_unsafe_math_optimizations)
11591 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11592 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11594 /* Optimize sin(x)/cos(x) as tan(x). */
11595 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11596 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11597 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11598 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11599 CALL_EXPR_ARG (arg1, 0), 0))
11601 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11603 if (tanfn != NULL_TREE)
11604 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11607 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11608 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11609 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11610 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11611 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11612 CALL_EXPR_ARG (arg1, 0), 0))
11614 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11616 if (tanfn != NULL_TREE)
11618 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11619 return fold_build2 (RDIV_EXPR, type,
11620 build_real (type, dconst1), tmp);
11624 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11625 NaNs or Infinities. */
11626 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11627 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11628 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11630 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11631 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11633 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11634 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11635 && operand_equal_p (arg00, arg01, 0))
11637 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11639 if (cosfn != NULL_TREE)
11640 return build_call_expr (cosfn, 1, arg00);
11644 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11645 NaNs or Infinities. */
11646 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11647 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11648 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11650 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11651 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11653 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11654 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11655 && operand_equal_p (arg00, arg01, 0))
11657 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11659 if (cosfn != NULL_TREE)
11661 tree tmp = build_call_expr (cosfn, 1, arg00);
11662 return fold_build2 (RDIV_EXPR, type,
11663 build_real (type, dconst1),
11664 tmp);
11669 /* Optimize pow(x,c)/x as pow(x,c-1). */
11670 if (fcode0 == BUILT_IN_POW
11671 || fcode0 == BUILT_IN_POWF
11672 || fcode0 == BUILT_IN_POWL)
11674 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11675 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11676 if (TREE_CODE (arg01) == REAL_CST
11677 && !TREE_OVERFLOW (arg01)
11678 && operand_equal_p (arg1, arg00, 0))
11680 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11681 REAL_VALUE_TYPE c;
11682 tree arg;
11684 c = TREE_REAL_CST (arg01);
11685 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11686 arg = build_real (type, c);
11687 return build_call_expr (powfn, 2, arg1, arg);
11691 /* Optimize a/root(b/c) into a*root(c/b). */
11692 if (BUILTIN_ROOT_P (fcode1))
11694 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11696 if (TREE_CODE (rootarg) == RDIV_EXPR)
11698 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11699 tree b = TREE_OPERAND (rootarg, 0);
11700 tree c = TREE_OPERAND (rootarg, 1);
11702 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11704 tmp = build_call_expr (rootfn, 1, tmp);
11705 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11709 /* Optimize x/expN(y) into x*expN(-y). */
11710 if (BUILTIN_EXPONENT_P (fcode1))
11712 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11713 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11714 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11715 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11718 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11719 if (fcode1 == BUILT_IN_POW
11720 || fcode1 == BUILT_IN_POWF
11721 || fcode1 == BUILT_IN_POWL)
11723 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11724 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11725 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11726 tree neg11 = fold_convert (type, negate_expr (arg11));
11727 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11728 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11731 return NULL_TREE;
11733 case TRUNC_DIV_EXPR:
11734 case FLOOR_DIV_EXPR:
11735 /* Simplify A / (B << N) where A and B are positive and B is
11736 a power of 2, to A >> (N + log2(B)). */
11737 strict_overflow_p = false;
11738 if (TREE_CODE (arg1) == LSHIFT_EXPR
11739 && (TYPE_UNSIGNED (type)
11740 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11742 tree sval = TREE_OPERAND (arg1, 0);
11743 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11745 tree sh_cnt = TREE_OPERAND (arg1, 1);
11746 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11748 if (strict_overflow_p)
11749 fold_overflow_warning (("assuming signed overflow does not "
11750 "occur when simplifying A / (B << N)"),
11751 WARN_STRICT_OVERFLOW_MISC);
11753 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11754 sh_cnt, build_int_cst (NULL_TREE, pow2));
11755 return fold_build2 (RSHIFT_EXPR, type,
11756 fold_convert (type, arg0), sh_cnt);
11760 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11761 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11762 if (INTEGRAL_TYPE_P (type)
11763 && TYPE_UNSIGNED (type)
11764 && code == FLOOR_DIV_EXPR)
11765 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11767 /* Fall thru */
11769 case ROUND_DIV_EXPR:
11770 case CEIL_DIV_EXPR:
11771 case EXACT_DIV_EXPR:
11772 if (integer_onep (arg1))
11773 return non_lvalue (fold_convert (type, arg0));
11774 if (integer_zerop (arg1))
11775 return NULL_TREE;
11776 /* X / -1 is -X. */
11777 if (!TYPE_UNSIGNED (type)
11778 && TREE_CODE (arg1) == INTEGER_CST
11779 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11780 && TREE_INT_CST_HIGH (arg1) == -1)
11781 return fold_convert (type, negate_expr (arg0));
11783 /* Convert -A / -B to A / B when the type is signed and overflow is
11784 undefined. */
11785 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11786 && TREE_CODE (arg0) == NEGATE_EXPR
11787 && negate_expr_p (arg1))
11789 if (INTEGRAL_TYPE_P (type))
11790 fold_overflow_warning (("assuming signed overflow does not occur "
11791 "when distributing negation across "
11792 "division"),
11793 WARN_STRICT_OVERFLOW_MISC);
11794 return fold_build2 (code, type,
11795 fold_convert (type, TREE_OPERAND (arg0, 0)),
11796 fold_convert (type, negate_expr (arg1)));
11798 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11799 && TREE_CODE (arg1) == NEGATE_EXPR
11800 && negate_expr_p (arg0))
11802 if (INTEGRAL_TYPE_P (type))
11803 fold_overflow_warning (("assuming signed overflow does not occur "
11804 "when distributing negation across "
11805 "division"),
11806 WARN_STRICT_OVERFLOW_MISC);
11807 return fold_build2 (code, type,
11808 fold_convert (type, negate_expr (arg0)),
11809 fold_convert (type, TREE_OPERAND (arg1, 0)));
11812 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11813 operation, EXACT_DIV_EXPR.
11815 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11816 At one time others generated faster code, it's not clear if they do
11817 after the last round to changes to the DIV code in expmed.c. */
11818 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11819 && multiple_of_p (type, arg0, arg1))
11820 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11822 strict_overflow_p = false;
11823 if (TREE_CODE (arg1) == INTEGER_CST
11824 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11825 &strict_overflow_p)))
11827 if (strict_overflow_p)
11828 fold_overflow_warning (("assuming signed overflow does not occur "
11829 "when simplifying division"),
11830 WARN_STRICT_OVERFLOW_MISC);
11831 return fold_convert (type, tem);
11834 return NULL_TREE;
11836 case CEIL_MOD_EXPR:
11837 case FLOOR_MOD_EXPR:
11838 case ROUND_MOD_EXPR:
11839 case TRUNC_MOD_EXPR:
11840 /* X % 1 is always zero, but be sure to preserve any side
11841 effects in X. */
11842 if (integer_onep (arg1))
11843 return omit_one_operand (type, integer_zero_node, arg0);
11845 /* X % 0, return X % 0 unchanged so that we can get the
11846 proper warnings and errors. */
11847 if (integer_zerop (arg1))
11848 return NULL_TREE;
11850 /* 0 % X is always zero, but be sure to preserve any side
11851 effects in X. Place this after checking for X == 0. */
11852 if (integer_zerop (arg0))
11853 return omit_one_operand (type, integer_zero_node, arg1);
11855 /* X % -1 is zero. */
11856 if (!TYPE_UNSIGNED (type)
11857 && TREE_CODE (arg1) == INTEGER_CST
11858 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11859 && TREE_INT_CST_HIGH (arg1) == -1)
11860 return omit_one_operand (type, integer_zero_node, arg0);
11862 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11863 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11864 strict_overflow_p = false;
11865 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11866 && (TYPE_UNSIGNED (type)
11867 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11869 tree c = arg1;
11870 /* Also optimize A % (C << N) where C is a power of 2,
11871 to A & ((C << N) - 1). */
11872 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11873 c = TREE_OPERAND (arg1, 0);
11875 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11877 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11878 build_int_cst (TREE_TYPE (arg1), 1));
11879 if (strict_overflow_p)
11880 fold_overflow_warning (("assuming signed overflow does not "
11881 "occur when simplifying "
11882 "X % (power of two)"),
11883 WARN_STRICT_OVERFLOW_MISC);
11884 return fold_build2 (BIT_AND_EXPR, type,
11885 fold_convert (type, arg0),
11886 fold_convert (type, mask));
11890 /* X % -C is the same as X % C. */
11891 if (code == TRUNC_MOD_EXPR
11892 && !TYPE_UNSIGNED (type)
11893 && TREE_CODE (arg1) == INTEGER_CST
11894 && !TREE_OVERFLOW (arg1)
11895 && TREE_INT_CST_HIGH (arg1) < 0
11896 && !TYPE_OVERFLOW_TRAPS (type)
11897 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11898 && !sign_bit_p (arg1, arg1))
11899 return fold_build2 (code, type, fold_convert (type, arg0),
11900 fold_convert (type, negate_expr (arg1)));
11902 /* X % -Y is the same as X % Y. */
11903 if (code == TRUNC_MOD_EXPR
11904 && !TYPE_UNSIGNED (type)
11905 && TREE_CODE (arg1) == NEGATE_EXPR
11906 && !TYPE_OVERFLOW_TRAPS (type))
11907 return fold_build2 (code, type, fold_convert (type, arg0),
11908 fold_convert (type, TREE_OPERAND (arg1, 0)));
11910 if (TREE_CODE (arg1) == INTEGER_CST
11911 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11912 &strict_overflow_p)))
11914 if (strict_overflow_p)
11915 fold_overflow_warning (("assuming signed overflow does not occur "
11916 "when simplifying modulus"),
11917 WARN_STRICT_OVERFLOW_MISC);
11918 return fold_convert (type, tem);
11921 return NULL_TREE;
11923 case LROTATE_EXPR:
11924 case RROTATE_EXPR:
11925 if (integer_all_onesp (arg0))
11926 return omit_one_operand (type, arg0, arg1);
11927 goto shift;
11929 case RSHIFT_EXPR:
11930 /* Optimize -1 >> x for arithmetic right shifts. */
11931 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11932 && tree_expr_nonnegative_p (arg1))
11933 return omit_one_operand (type, arg0, arg1);
11934 /* ... fall through ... */
11936 case LSHIFT_EXPR:
11937 shift:
11938 if (integer_zerop (arg1))
11939 return non_lvalue (fold_convert (type, arg0));
11940 if (integer_zerop (arg0))
11941 return omit_one_operand (type, arg0, arg1);
11943 /* Since negative shift count is not well-defined,
11944 don't try to compute it in the compiler. */
11945 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11946 return NULL_TREE;
11948 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11949 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11950 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11951 && host_integerp (TREE_OPERAND (arg0, 1), false)
11952 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11954 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11955 + TREE_INT_CST_LOW (arg1));
11957 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11958 being well defined. */
11959 if (low >= TYPE_PRECISION (type))
11961 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11962 low = low % TYPE_PRECISION (type);
11963 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11964 return omit_one_operand (type, build_int_cst (type, 0),
11965 TREE_OPERAND (arg0, 0));
11966 else
11967 low = TYPE_PRECISION (type) - 1;
11970 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11971 build_int_cst (type, low));
11974 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11975 into x & ((unsigned)-1 >> c) for unsigned types. */
11976 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11977 || (TYPE_UNSIGNED (type)
11978 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11979 && host_integerp (arg1, false)
11980 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11981 && host_integerp (TREE_OPERAND (arg0, 1), false)
11982 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11984 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11985 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11986 tree lshift;
11987 tree arg00;
11989 if (low0 == low1)
11991 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11993 lshift = build_int_cst (type, -1);
11994 lshift = int_const_binop (code, lshift, arg1, 0);
11996 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
12000 /* Rewrite an LROTATE_EXPR by a constant into an
12001 RROTATE_EXPR by a new constant. */
12002 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12004 tree tem = build_int_cst (TREE_TYPE (arg1),
12005 TYPE_PRECISION (type));
12006 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12007 return fold_build2 (RROTATE_EXPR, type, op0, tem);
12010 /* If we have a rotate of a bit operation with the rotate count and
12011 the second operand of the bit operation both constant,
12012 permute the two operations. */
12013 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12014 && (TREE_CODE (arg0) == BIT_AND_EXPR
12015 || TREE_CODE (arg0) == BIT_IOR_EXPR
12016 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12017 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12018 return fold_build2 (TREE_CODE (arg0), type,
12019 fold_build2 (code, type,
12020 TREE_OPERAND (arg0, 0), arg1),
12021 fold_build2 (code, type,
12022 TREE_OPERAND (arg0, 1), arg1));
12024 /* Two consecutive rotates adding up to the precision of the
12025 type can be ignored. */
12026 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12027 && TREE_CODE (arg0) == RROTATE_EXPR
12028 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12029 && TREE_INT_CST_HIGH (arg1) == 0
12030 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12031 && ((TREE_INT_CST_LOW (arg1)
12032 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12033 == (unsigned int) TYPE_PRECISION (type)))
12034 return TREE_OPERAND (arg0, 0);
12036 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12037 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12038 if the latter can be further optimized. */
12039 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12040 && TREE_CODE (arg0) == BIT_AND_EXPR
12041 && TREE_CODE (arg1) == INTEGER_CST
12042 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12044 tree mask = fold_build2 (code, type,
12045 fold_convert (type, TREE_OPERAND (arg0, 1)),
12046 arg1);
12047 tree shift = fold_build2 (code, type,
12048 fold_convert (type, TREE_OPERAND (arg0, 0)),
12049 arg1);
12050 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
12051 if (tem)
12052 return tem;
12055 return NULL_TREE;
12057 case MIN_EXPR:
12058 if (operand_equal_p (arg0, arg1, 0))
12059 return omit_one_operand (type, arg0, arg1);
12060 if (INTEGRAL_TYPE_P (type)
12061 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12062 return omit_one_operand (type, arg1, arg0);
12063 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
12064 if (tem)
12065 return tem;
12066 goto associate;
12068 case MAX_EXPR:
12069 if (operand_equal_p (arg0, arg1, 0))
12070 return omit_one_operand (type, arg0, arg1);
12071 if (INTEGRAL_TYPE_P (type)
12072 && TYPE_MAX_VALUE (type)
12073 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12074 return omit_one_operand (type, arg1, arg0);
12075 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
12076 if (tem)
12077 return tem;
12078 goto associate;
12080 case TRUTH_ANDIF_EXPR:
12081 /* Note that the operands of this must be ints
12082 and their values must be 0 or 1.
12083 ("true" is a fixed value perhaps depending on the language.) */
12084 /* If first arg is constant zero, return it. */
12085 if (integer_zerop (arg0))
12086 return fold_convert (type, arg0);
12087 case TRUTH_AND_EXPR:
12088 /* If either arg is constant true, drop it. */
12089 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12090 return non_lvalue (fold_convert (type, arg1));
12091 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12092 /* Preserve sequence points. */
12093 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12094 return non_lvalue (fold_convert (type, arg0));
12095 /* If second arg is constant zero, result is zero, but first arg
12096 must be evaluated. */
12097 if (integer_zerop (arg1))
12098 return omit_one_operand (type, arg1, arg0);
12099 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12100 case will be handled here. */
12101 if (integer_zerop (arg0))
12102 return omit_one_operand (type, arg0, arg1);
12104 /* !X && X is always false. */
12105 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12106 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12107 return omit_one_operand (type, integer_zero_node, arg1);
12108 /* X && !X is always false. */
12109 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12110 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12111 return omit_one_operand (type, integer_zero_node, arg0);
12113 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12114 means A >= Y && A != MAX, but in this case we know that
12115 A < X <= MAX. */
12117 if (!TREE_SIDE_EFFECTS (arg0)
12118 && !TREE_SIDE_EFFECTS (arg1))
12120 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12121 if (tem && !operand_equal_p (tem, arg0, 0))
12122 return fold_build2 (code, type, tem, arg1);
12124 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12125 if (tem && !operand_equal_p (tem, arg1, 0))
12126 return fold_build2 (code, type, arg0, tem);
12129 truth_andor:
12130 /* We only do these simplifications if we are optimizing. */
12131 if (!optimize)
12132 return NULL_TREE;
12134 /* Check for things like (A || B) && (A || C). We can convert this
12135 to A || (B && C). Note that either operator can be any of the four
12136 truth and/or operations and the transformation will still be
12137 valid. Also note that we only care about order for the
12138 ANDIF and ORIF operators. If B contains side effects, this
12139 might change the truth-value of A. */
12140 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12141 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12142 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12143 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12144 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12145 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12147 tree a00 = TREE_OPERAND (arg0, 0);
12148 tree a01 = TREE_OPERAND (arg0, 1);
12149 tree a10 = TREE_OPERAND (arg1, 0);
12150 tree a11 = TREE_OPERAND (arg1, 1);
12151 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12152 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12153 && (code == TRUTH_AND_EXPR
12154 || code == TRUTH_OR_EXPR));
12156 if (operand_equal_p (a00, a10, 0))
12157 return fold_build2 (TREE_CODE (arg0), type, a00,
12158 fold_build2 (code, type, a01, a11));
12159 else if (commutative && operand_equal_p (a00, a11, 0))
12160 return fold_build2 (TREE_CODE (arg0), type, a00,
12161 fold_build2 (code, type, a01, a10));
12162 else if (commutative && operand_equal_p (a01, a10, 0))
12163 return fold_build2 (TREE_CODE (arg0), type, a01,
12164 fold_build2 (code, type, a00, a11));
12166 /* This case if tricky because we must either have commutative
12167 operators or else A10 must not have side-effects. */
12169 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12170 && operand_equal_p (a01, a11, 0))
12171 return fold_build2 (TREE_CODE (arg0), type,
12172 fold_build2 (code, type, a00, a10),
12173 a01);
12176 /* See if we can build a range comparison. */
12177 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12178 return tem;
12180 /* Check for the possibility of merging component references. If our
12181 lhs is another similar operation, try to merge its rhs with our
12182 rhs. Then try to merge our lhs and rhs. */
12183 if (TREE_CODE (arg0) == code
12184 && 0 != (tem = fold_truthop (code, type,
12185 TREE_OPERAND (arg0, 1), arg1)))
12186 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12188 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12189 return tem;
12191 return NULL_TREE;
12193 case TRUTH_ORIF_EXPR:
12194 /* Note that the operands of this must be ints
12195 and their values must be 0 or true.
12196 ("true" is a fixed value perhaps depending on the language.) */
12197 /* If first arg is constant true, return it. */
12198 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12199 return fold_convert (type, arg0);
12200 case TRUTH_OR_EXPR:
12201 /* If either arg is constant zero, drop it. */
12202 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12203 return non_lvalue (fold_convert (type, arg1));
12204 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12205 /* Preserve sequence points. */
12206 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12207 return non_lvalue (fold_convert (type, arg0));
12208 /* If second arg is constant true, result is true, but we must
12209 evaluate first arg. */
12210 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12211 return omit_one_operand (type, arg1, arg0);
12212 /* Likewise for first arg, but note this only occurs here for
12213 TRUTH_OR_EXPR. */
12214 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12215 return omit_one_operand (type, arg0, arg1);
12217 /* !X || X is always true. */
12218 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12219 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12220 return omit_one_operand (type, integer_one_node, arg1);
12221 /* X || !X is always true. */
12222 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12223 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12224 return omit_one_operand (type, integer_one_node, arg0);
12226 goto truth_andor;
12228 case TRUTH_XOR_EXPR:
12229 /* If the second arg is constant zero, drop it. */
12230 if (integer_zerop (arg1))
12231 return non_lvalue (fold_convert (type, arg0));
12232 /* If the second arg is constant true, this is a logical inversion. */
12233 if (integer_onep (arg1))
12235 /* Only call invert_truthvalue if operand is a truth value. */
12236 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12237 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12238 else
12239 tem = invert_truthvalue (arg0);
12240 return non_lvalue (fold_convert (type, tem));
12242 /* Identical arguments cancel to zero. */
12243 if (operand_equal_p (arg0, arg1, 0))
12244 return omit_one_operand (type, integer_zero_node, arg0);
12246 /* !X ^ X is always true. */
12247 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12248 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12249 return omit_one_operand (type, integer_one_node, arg1);
12251 /* X ^ !X is always true. */
12252 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12253 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12254 return omit_one_operand (type, integer_one_node, arg0);
12256 return NULL_TREE;
12258 case EQ_EXPR:
12259 case NE_EXPR:
12260 tem = fold_comparison (code, type, op0, op1);
12261 if (tem != NULL_TREE)
12262 return tem;
12264 /* bool_var != 0 becomes bool_var. */
12265 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12266 && code == NE_EXPR)
12267 return non_lvalue (fold_convert (type, arg0));
12269 /* bool_var == 1 becomes bool_var. */
12270 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12271 && code == EQ_EXPR)
12272 return non_lvalue (fold_convert (type, arg0));
12274 /* bool_var != 1 becomes !bool_var. */
12275 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12276 && code == NE_EXPR)
12277 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12279 /* bool_var == 0 becomes !bool_var. */
12280 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12281 && code == EQ_EXPR)
12282 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12284 /* If this is an equality comparison of the address of two non-weak,
12285 unaliased symbols neither of which are extern (since we do not
12286 have access to attributes for externs), then we know the result. */
12287 if (TREE_CODE (arg0) == ADDR_EXPR
12288 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12289 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12290 && ! lookup_attribute ("alias",
12291 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12292 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12293 && TREE_CODE (arg1) == ADDR_EXPR
12294 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12295 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12296 && ! lookup_attribute ("alias",
12297 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12298 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12300 /* We know that we're looking at the address of two
12301 non-weak, unaliased, static _DECL nodes.
12303 It is both wasteful and incorrect to call operand_equal_p
12304 to compare the two ADDR_EXPR nodes. It is wasteful in that
12305 all we need to do is test pointer equality for the arguments
12306 to the two ADDR_EXPR nodes. It is incorrect to use
12307 operand_equal_p as that function is NOT equivalent to a
12308 C equality test. It can in fact return false for two
12309 objects which would test as equal using the C equality
12310 operator. */
12311 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12312 return constant_boolean_node (equal
12313 ? code == EQ_EXPR : code != EQ_EXPR,
12314 type);
12317 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12318 a MINUS_EXPR of a constant, we can convert it into a comparison with
12319 a revised constant as long as no overflow occurs. */
12320 if (TREE_CODE (arg1) == INTEGER_CST
12321 && (TREE_CODE (arg0) == PLUS_EXPR
12322 || TREE_CODE (arg0) == MINUS_EXPR)
12323 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12324 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12325 ? MINUS_EXPR : PLUS_EXPR,
12326 fold_convert (TREE_TYPE (arg0), arg1),
12327 TREE_OPERAND (arg0, 1), 0))
12328 && !TREE_OVERFLOW (tem))
12329 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12331 /* Similarly for a NEGATE_EXPR. */
12332 if (TREE_CODE (arg0) == NEGATE_EXPR
12333 && TREE_CODE (arg1) == INTEGER_CST
12334 && 0 != (tem = negate_expr (arg1))
12335 && TREE_CODE (tem) == INTEGER_CST
12336 && !TREE_OVERFLOW (tem))
12337 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12339 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12340 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12341 && TREE_CODE (arg1) == INTEGER_CST
12342 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12343 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12344 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12345 fold_convert (TREE_TYPE (arg0), arg1),
12346 TREE_OPERAND (arg0, 1)));
12348 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12349 if ((TREE_CODE (arg0) == PLUS_EXPR
12350 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12351 || TREE_CODE (arg0) == MINUS_EXPR)
12352 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12353 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12354 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12356 tree val = TREE_OPERAND (arg0, 1);
12357 return omit_two_operands (type,
12358 fold_build2 (code, type,
12359 val,
12360 build_int_cst (TREE_TYPE (val),
12361 0)),
12362 TREE_OPERAND (arg0, 0), arg1);
12365 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12366 if (TREE_CODE (arg0) == MINUS_EXPR
12367 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12368 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12369 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12371 return omit_two_operands (type,
12372 code == NE_EXPR
12373 ? boolean_true_node : boolean_false_node,
12374 TREE_OPERAND (arg0, 1), arg1);
12377 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12378 for !=. Don't do this for ordered comparisons due to overflow. */
12379 if (TREE_CODE (arg0) == MINUS_EXPR
12380 && integer_zerop (arg1))
12381 return fold_build2 (code, type,
12382 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12384 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12385 if (TREE_CODE (arg0) == ABS_EXPR
12386 && (integer_zerop (arg1) || real_zerop (arg1)))
12387 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12389 /* If this is an EQ or NE comparison with zero and ARG0 is
12390 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12391 two operations, but the latter can be done in one less insn
12392 on machines that have only two-operand insns or on which a
12393 constant cannot be the first operand. */
12394 if (TREE_CODE (arg0) == BIT_AND_EXPR
12395 && integer_zerop (arg1))
12397 tree arg00 = TREE_OPERAND (arg0, 0);
12398 tree arg01 = TREE_OPERAND (arg0, 1);
12399 if (TREE_CODE (arg00) == LSHIFT_EXPR
12400 && integer_onep (TREE_OPERAND (arg00, 0)))
12402 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12403 arg01, TREE_OPERAND (arg00, 1));
12404 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12405 build_int_cst (TREE_TYPE (arg0), 1));
12406 return fold_build2 (code, type,
12407 fold_convert (TREE_TYPE (arg1), tem), arg1);
12409 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12410 && integer_onep (TREE_OPERAND (arg01, 0)))
12412 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12413 arg00, TREE_OPERAND (arg01, 1));
12414 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12415 build_int_cst (TREE_TYPE (arg0), 1));
12416 return fold_build2 (code, type,
12417 fold_convert (TREE_TYPE (arg1), tem), arg1);
12421 /* If this is an NE or EQ comparison of zero against the result of a
12422 signed MOD operation whose second operand is a power of 2, make
12423 the MOD operation unsigned since it is simpler and equivalent. */
12424 if (integer_zerop (arg1)
12425 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12426 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12427 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12428 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12429 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12430 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12432 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12433 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12434 fold_convert (newtype,
12435 TREE_OPERAND (arg0, 0)),
12436 fold_convert (newtype,
12437 TREE_OPERAND (arg0, 1)));
12439 return fold_build2 (code, type, newmod,
12440 fold_convert (newtype, arg1));
12443 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12444 C1 is a valid shift constant, and C2 is a power of two, i.e.
12445 a single bit. */
12446 if (TREE_CODE (arg0) == BIT_AND_EXPR
12447 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12448 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12449 == INTEGER_CST
12450 && integer_pow2p (TREE_OPERAND (arg0, 1))
12451 && integer_zerop (arg1))
12453 tree itype = TREE_TYPE (arg0);
12454 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12455 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12457 /* Check for a valid shift count. */
12458 if (TREE_INT_CST_HIGH (arg001) == 0
12459 && TREE_INT_CST_LOW (arg001) < prec)
12461 tree arg01 = TREE_OPERAND (arg0, 1);
12462 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12463 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12464 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12465 can be rewritten as (X & (C2 << C1)) != 0. */
12466 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12468 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12469 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12470 return fold_build2 (code, type, tem, arg1);
12472 /* Otherwise, for signed (arithmetic) shifts,
12473 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12474 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12475 else if (!TYPE_UNSIGNED (itype))
12476 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12477 arg000, build_int_cst (itype, 0));
12478 /* Otherwise, of unsigned (logical) shifts,
12479 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12480 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12481 else
12482 return omit_one_operand (type,
12483 code == EQ_EXPR ? integer_one_node
12484 : integer_zero_node,
12485 arg000);
12489 /* If this is an NE comparison of zero with an AND of one, remove the
12490 comparison since the AND will give the correct value. */
12491 if (code == NE_EXPR
12492 && integer_zerop (arg1)
12493 && TREE_CODE (arg0) == BIT_AND_EXPR
12494 && integer_onep (TREE_OPERAND (arg0, 1)))
12495 return fold_convert (type, arg0);
12497 /* If we have (A & C) == C where C is a power of 2, convert this into
12498 (A & C) != 0. Similarly for NE_EXPR. */
12499 if (TREE_CODE (arg0) == BIT_AND_EXPR
12500 && integer_pow2p (TREE_OPERAND (arg0, 1))
12501 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12502 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12503 arg0, fold_convert (TREE_TYPE (arg0),
12504 integer_zero_node));
12506 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12507 bit, then fold the expression into A < 0 or A >= 0. */
12508 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12509 if (tem)
12510 return tem;
12512 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12513 Similarly for NE_EXPR. */
12514 if (TREE_CODE (arg0) == BIT_AND_EXPR
12515 && TREE_CODE (arg1) == INTEGER_CST
12516 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12518 tree notc = fold_build1 (BIT_NOT_EXPR,
12519 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12520 TREE_OPERAND (arg0, 1));
12521 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12522 arg1, notc);
12523 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12524 if (integer_nonzerop (dandnotc))
12525 return omit_one_operand (type, rslt, arg0);
12528 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12529 Similarly for NE_EXPR. */
12530 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12531 && TREE_CODE (arg1) == INTEGER_CST
12532 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12534 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12535 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12536 TREE_OPERAND (arg0, 1), notd);
12537 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12538 if (integer_nonzerop (candnotd))
12539 return omit_one_operand (type, rslt, arg0);
12542 /* If this is a comparison of a field, we may be able to simplify it. */
12543 if ((TREE_CODE (arg0) == COMPONENT_REF
12544 || TREE_CODE (arg0) == BIT_FIELD_REF)
12545 /* Handle the constant case even without -O
12546 to make sure the warnings are given. */
12547 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12549 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12550 if (t1)
12551 return t1;
12554 /* Optimize comparisons of strlen vs zero to a compare of the
12555 first character of the string vs zero. To wit,
12556 strlen(ptr) == 0 => *ptr == 0
12557 strlen(ptr) != 0 => *ptr != 0
12558 Other cases should reduce to one of these two (or a constant)
12559 due to the return value of strlen being unsigned. */
12560 if (TREE_CODE (arg0) == CALL_EXPR
12561 && integer_zerop (arg1))
12563 tree fndecl = get_callee_fndecl (arg0);
12565 if (fndecl
12566 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12567 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12568 && call_expr_nargs (arg0) == 1
12569 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12571 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12572 return fold_build2 (code, type, iref,
12573 build_int_cst (TREE_TYPE (iref), 0));
12577 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12578 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12579 if (TREE_CODE (arg0) == RSHIFT_EXPR
12580 && integer_zerop (arg1)
12581 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12583 tree arg00 = TREE_OPERAND (arg0, 0);
12584 tree arg01 = TREE_OPERAND (arg0, 1);
12585 tree itype = TREE_TYPE (arg00);
12586 if (TREE_INT_CST_HIGH (arg01) == 0
12587 && TREE_INT_CST_LOW (arg01)
12588 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12590 if (TYPE_UNSIGNED (itype))
12592 itype = signed_type_for (itype);
12593 arg00 = fold_convert (itype, arg00);
12595 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12596 type, arg00, build_int_cst (itype, 0));
12600 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12601 if (integer_zerop (arg1)
12602 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12603 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12604 TREE_OPERAND (arg0, 1));
12606 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12607 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12608 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12609 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12610 build_int_cst (TREE_TYPE (arg1), 0));
12611 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12612 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12613 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12614 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12615 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12616 build_int_cst (TREE_TYPE (arg1), 0));
12618 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12619 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12620 && TREE_CODE (arg1) == INTEGER_CST
12621 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12622 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12623 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12624 TREE_OPERAND (arg0, 1), arg1));
12626 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12627 (X & C) == 0 when C is a single bit. */
12628 if (TREE_CODE (arg0) == BIT_AND_EXPR
12629 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12630 && integer_zerop (arg1)
12631 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12633 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12634 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12635 TREE_OPERAND (arg0, 1));
12636 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12637 type, tem, arg1);
12640 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12641 constant C is a power of two, i.e. a single bit. */
12642 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12643 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12644 && integer_zerop (arg1)
12645 && integer_pow2p (TREE_OPERAND (arg0, 1))
12646 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12647 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12649 tree arg00 = TREE_OPERAND (arg0, 0);
12650 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12651 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12654 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12655 when is C is a power of two, i.e. a single bit. */
12656 if (TREE_CODE (arg0) == BIT_AND_EXPR
12657 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12658 && integer_zerop (arg1)
12659 && integer_pow2p (TREE_OPERAND (arg0, 1))
12660 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12661 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12663 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12664 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12665 arg000, TREE_OPERAND (arg0, 1));
12666 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12667 tem, build_int_cst (TREE_TYPE (tem), 0));
12670 if (integer_zerop (arg1)
12671 && tree_expr_nonzero_p (arg0))
12673 tree res = constant_boolean_node (code==NE_EXPR, type);
12674 return omit_one_operand (type, res, arg0);
12677 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12678 if (TREE_CODE (arg0) == NEGATE_EXPR
12679 && TREE_CODE (arg1) == NEGATE_EXPR)
12680 return fold_build2 (code, type,
12681 TREE_OPERAND (arg0, 0),
12682 TREE_OPERAND (arg1, 0));
12684 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12685 if (TREE_CODE (arg0) == BIT_AND_EXPR
12686 && TREE_CODE (arg1) == BIT_AND_EXPR)
12688 tree arg00 = TREE_OPERAND (arg0, 0);
12689 tree arg01 = TREE_OPERAND (arg0, 1);
12690 tree arg10 = TREE_OPERAND (arg1, 0);
12691 tree arg11 = TREE_OPERAND (arg1, 1);
12692 tree itype = TREE_TYPE (arg0);
12694 if (operand_equal_p (arg01, arg11, 0))
12695 return fold_build2 (code, type,
12696 fold_build2 (BIT_AND_EXPR, itype,
12697 fold_build2 (BIT_XOR_EXPR, itype,
12698 arg00, arg10),
12699 arg01),
12700 build_int_cst (itype, 0));
12702 if (operand_equal_p (arg01, arg10, 0))
12703 return fold_build2 (code, type,
12704 fold_build2 (BIT_AND_EXPR, itype,
12705 fold_build2 (BIT_XOR_EXPR, itype,
12706 arg00, arg11),
12707 arg01),
12708 build_int_cst (itype, 0));
12710 if (operand_equal_p (arg00, arg11, 0))
12711 return fold_build2 (code, type,
12712 fold_build2 (BIT_AND_EXPR, itype,
12713 fold_build2 (BIT_XOR_EXPR, itype,
12714 arg01, arg10),
12715 arg00),
12716 build_int_cst (itype, 0));
12718 if (operand_equal_p (arg00, arg10, 0))
12719 return fold_build2 (code, type,
12720 fold_build2 (BIT_AND_EXPR, itype,
12721 fold_build2 (BIT_XOR_EXPR, itype,
12722 arg01, arg11),
12723 arg00),
12724 build_int_cst (itype, 0));
12727 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12728 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12730 tree arg00 = TREE_OPERAND (arg0, 0);
12731 tree arg01 = TREE_OPERAND (arg0, 1);
12732 tree arg10 = TREE_OPERAND (arg1, 0);
12733 tree arg11 = TREE_OPERAND (arg1, 1);
12734 tree itype = TREE_TYPE (arg0);
12736 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12737 operand_equal_p guarantees no side-effects so we don't need
12738 to use omit_one_operand on Z. */
12739 if (operand_equal_p (arg01, arg11, 0))
12740 return fold_build2 (code, type, arg00, arg10);
12741 if (operand_equal_p (arg01, arg10, 0))
12742 return fold_build2 (code, type, arg00, arg11);
12743 if (operand_equal_p (arg00, arg11, 0))
12744 return fold_build2 (code, type, arg01, arg10);
12745 if (operand_equal_p (arg00, arg10, 0))
12746 return fold_build2 (code, type, arg01, arg11);
12748 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12749 if (TREE_CODE (arg01) == INTEGER_CST
12750 && TREE_CODE (arg11) == INTEGER_CST)
12751 return fold_build2 (code, type,
12752 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12753 fold_build2 (BIT_XOR_EXPR, itype,
12754 arg01, arg11)),
12755 arg10);
12758 /* Attempt to simplify equality/inequality comparisons of complex
12759 values. Only lower the comparison if the result is known or
12760 can be simplified to a single scalar comparison. */
12761 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12762 || TREE_CODE (arg0) == COMPLEX_CST)
12763 && (TREE_CODE (arg1) == COMPLEX_EXPR
12764 || TREE_CODE (arg1) == COMPLEX_CST))
12766 tree real0, imag0, real1, imag1;
12767 tree rcond, icond;
12769 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12771 real0 = TREE_OPERAND (arg0, 0);
12772 imag0 = TREE_OPERAND (arg0, 1);
12774 else
12776 real0 = TREE_REALPART (arg0);
12777 imag0 = TREE_IMAGPART (arg0);
12780 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12782 real1 = TREE_OPERAND (arg1, 0);
12783 imag1 = TREE_OPERAND (arg1, 1);
12785 else
12787 real1 = TREE_REALPART (arg1);
12788 imag1 = TREE_IMAGPART (arg1);
12791 rcond = fold_binary (code, type, real0, real1);
12792 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12794 if (integer_zerop (rcond))
12796 if (code == EQ_EXPR)
12797 return omit_two_operands (type, boolean_false_node,
12798 imag0, imag1);
12799 return fold_build2 (NE_EXPR, type, imag0, imag1);
12801 else
12803 if (code == NE_EXPR)
12804 return omit_two_operands (type, boolean_true_node,
12805 imag0, imag1);
12806 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12810 icond = fold_binary (code, type, imag0, imag1);
12811 if (icond && TREE_CODE (icond) == INTEGER_CST)
12813 if (integer_zerop (icond))
12815 if (code == EQ_EXPR)
12816 return omit_two_operands (type, boolean_false_node,
12817 real0, real1);
12818 return fold_build2 (NE_EXPR, type, real0, real1);
12820 else
12822 if (code == NE_EXPR)
12823 return omit_two_operands (type, boolean_true_node,
12824 real0, real1);
12825 return fold_build2 (EQ_EXPR, type, real0, real1);
12830 return NULL_TREE;
12832 case LT_EXPR:
12833 case GT_EXPR:
12834 case LE_EXPR:
12835 case GE_EXPR:
12836 tem = fold_comparison (code, type, op0, op1);
12837 if (tem != NULL_TREE)
12838 return tem;
12840 /* Transform comparisons of the form X +- C CMP X. */
12841 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12842 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12843 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12844 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12845 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12846 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12848 tree arg01 = TREE_OPERAND (arg0, 1);
12849 enum tree_code code0 = TREE_CODE (arg0);
12850 int is_positive;
12852 if (TREE_CODE (arg01) == REAL_CST)
12853 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12854 else
12855 is_positive = tree_int_cst_sgn (arg01);
12857 /* (X - c) > X becomes false. */
12858 if (code == GT_EXPR
12859 && ((code0 == MINUS_EXPR && is_positive >= 0)
12860 || (code0 == PLUS_EXPR && is_positive <= 0)))
12862 if (TREE_CODE (arg01) == INTEGER_CST
12863 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12864 fold_overflow_warning (("assuming signed overflow does not "
12865 "occur when assuming that (X - c) > X "
12866 "is always false"),
12867 WARN_STRICT_OVERFLOW_ALL);
12868 return constant_boolean_node (0, type);
12871 /* Likewise (X + c) < X becomes false. */
12872 if (code == LT_EXPR
12873 && ((code0 == PLUS_EXPR && is_positive >= 0)
12874 || (code0 == MINUS_EXPR && is_positive <= 0)))
12876 if (TREE_CODE (arg01) == INTEGER_CST
12877 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12878 fold_overflow_warning (("assuming signed overflow does not "
12879 "occur when assuming that "
12880 "(X + c) < X is always false"),
12881 WARN_STRICT_OVERFLOW_ALL);
12882 return constant_boolean_node (0, type);
12885 /* Convert (X - c) <= X to true. */
12886 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12887 && code == LE_EXPR
12888 && ((code0 == MINUS_EXPR && is_positive >= 0)
12889 || (code0 == PLUS_EXPR && is_positive <= 0)))
12891 if (TREE_CODE (arg01) == INTEGER_CST
12892 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12893 fold_overflow_warning (("assuming signed overflow does not "
12894 "occur when assuming that "
12895 "(X - c) <= X is always true"),
12896 WARN_STRICT_OVERFLOW_ALL);
12897 return constant_boolean_node (1, type);
12900 /* Convert (X + c) >= X to true. */
12901 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12902 && code == GE_EXPR
12903 && ((code0 == PLUS_EXPR && is_positive >= 0)
12904 || (code0 == MINUS_EXPR && is_positive <= 0)))
12906 if (TREE_CODE (arg01) == INTEGER_CST
12907 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12908 fold_overflow_warning (("assuming signed overflow does not "
12909 "occur when assuming that "
12910 "(X + c) >= X is always true"),
12911 WARN_STRICT_OVERFLOW_ALL);
12912 return constant_boolean_node (1, type);
12915 if (TREE_CODE (arg01) == INTEGER_CST)
12917 /* Convert X + c > X and X - c < X to true for integers. */
12918 if (code == GT_EXPR
12919 && ((code0 == PLUS_EXPR && is_positive > 0)
12920 || (code0 == MINUS_EXPR && is_positive < 0)))
12922 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12923 fold_overflow_warning (("assuming signed overflow does "
12924 "not occur when assuming that "
12925 "(X + c) > X is always true"),
12926 WARN_STRICT_OVERFLOW_ALL);
12927 return constant_boolean_node (1, type);
12930 if (code == LT_EXPR
12931 && ((code0 == MINUS_EXPR && is_positive > 0)
12932 || (code0 == PLUS_EXPR && is_positive < 0)))
12934 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12935 fold_overflow_warning (("assuming signed overflow does "
12936 "not occur when assuming that "
12937 "(X - c) < X is always true"),
12938 WARN_STRICT_OVERFLOW_ALL);
12939 return constant_boolean_node (1, type);
12942 /* Convert X + c <= X and X - c >= X to false for integers. */
12943 if (code == LE_EXPR
12944 && ((code0 == PLUS_EXPR && is_positive > 0)
12945 || (code0 == MINUS_EXPR && is_positive < 0)))
12947 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12948 fold_overflow_warning (("assuming signed overflow does "
12949 "not occur when assuming that "
12950 "(X + c) <= X is always false"),
12951 WARN_STRICT_OVERFLOW_ALL);
12952 return constant_boolean_node (0, type);
12955 if (code == GE_EXPR
12956 && ((code0 == MINUS_EXPR && is_positive > 0)
12957 || (code0 == PLUS_EXPR && is_positive < 0)))
12959 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12960 fold_overflow_warning (("assuming signed overflow does "
12961 "not occur when assuming that "
12962 "(X - c) >= X is always false"),
12963 WARN_STRICT_OVERFLOW_ALL);
12964 return constant_boolean_node (0, type);
12969 /* Comparisons with the highest or lowest possible integer of
12970 the specified precision will have known values. */
12972 tree arg1_type = TREE_TYPE (arg1);
12973 unsigned int width = TYPE_PRECISION (arg1_type);
12975 if (TREE_CODE (arg1) == INTEGER_CST
12976 && width <= 2 * HOST_BITS_PER_WIDE_INT
12977 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12979 HOST_WIDE_INT signed_max_hi;
12980 unsigned HOST_WIDE_INT signed_max_lo;
12981 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12983 if (width <= HOST_BITS_PER_WIDE_INT)
12985 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12986 - 1;
12987 signed_max_hi = 0;
12988 max_hi = 0;
12990 if (TYPE_UNSIGNED (arg1_type))
12992 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12993 min_lo = 0;
12994 min_hi = 0;
12996 else
12998 max_lo = signed_max_lo;
12999 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13000 min_hi = -1;
13003 else
13005 width -= HOST_BITS_PER_WIDE_INT;
13006 signed_max_lo = -1;
13007 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13008 - 1;
13009 max_lo = -1;
13010 min_lo = 0;
13012 if (TYPE_UNSIGNED (arg1_type))
13014 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13015 min_hi = 0;
13017 else
13019 max_hi = signed_max_hi;
13020 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13024 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13025 && TREE_INT_CST_LOW (arg1) == max_lo)
13026 switch (code)
13028 case GT_EXPR:
13029 return omit_one_operand (type, integer_zero_node, arg0);
13031 case GE_EXPR:
13032 return fold_build2 (EQ_EXPR, type, op0, op1);
13034 case LE_EXPR:
13035 return omit_one_operand (type, integer_one_node, arg0);
13037 case LT_EXPR:
13038 return fold_build2 (NE_EXPR, type, op0, op1);
13040 /* The GE_EXPR and LT_EXPR cases above are not normally
13041 reached because of previous transformations. */
13043 default:
13044 break;
13046 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13047 == max_hi
13048 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13049 switch (code)
13051 case GT_EXPR:
13052 arg1 = const_binop (PLUS_EXPR, arg1,
13053 build_int_cst (TREE_TYPE (arg1), 1), 0);
13054 return fold_build2 (EQ_EXPR, type,
13055 fold_convert (TREE_TYPE (arg1), arg0),
13056 arg1);
13057 case LE_EXPR:
13058 arg1 = const_binop (PLUS_EXPR, arg1,
13059 build_int_cst (TREE_TYPE (arg1), 1), 0);
13060 return fold_build2 (NE_EXPR, type,
13061 fold_convert (TREE_TYPE (arg1), arg0),
13062 arg1);
13063 default:
13064 break;
13066 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13067 == min_hi
13068 && TREE_INT_CST_LOW (arg1) == min_lo)
13069 switch (code)
13071 case LT_EXPR:
13072 return omit_one_operand (type, integer_zero_node, arg0);
13074 case LE_EXPR:
13075 return fold_build2 (EQ_EXPR, type, op0, op1);
13077 case GE_EXPR:
13078 return omit_one_operand (type, integer_one_node, arg0);
13080 case GT_EXPR:
13081 return fold_build2 (NE_EXPR, type, op0, op1);
13083 default:
13084 break;
13086 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13087 == min_hi
13088 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13089 switch (code)
13091 case GE_EXPR:
13092 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13093 return fold_build2 (NE_EXPR, type,
13094 fold_convert (TREE_TYPE (arg1), arg0),
13095 arg1);
13096 case LT_EXPR:
13097 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13098 return fold_build2 (EQ_EXPR, type,
13099 fold_convert (TREE_TYPE (arg1), arg0),
13100 arg1);
13101 default:
13102 break;
13105 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13106 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13107 && TYPE_UNSIGNED (arg1_type)
13108 /* We will flip the signedness of the comparison operator
13109 associated with the mode of arg1, so the sign bit is
13110 specified by this mode. Check that arg1 is the signed
13111 max associated with this sign bit. */
13112 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13113 /* signed_type does not work on pointer types. */
13114 && INTEGRAL_TYPE_P (arg1_type))
13116 /* The following case also applies to X < signed_max+1
13117 and X >= signed_max+1 because previous transformations. */
13118 if (code == LE_EXPR || code == GT_EXPR)
13120 tree st;
13121 st = signed_type_for (TREE_TYPE (arg1));
13122 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13123 type, fold_convert (st, arg0),
13124 build_int_cst (st, 0));
13130 /* If we are comparing an ABS_EXPR with a constant, we can
13131 convert all the cases into explicit comparisons, but they may
13132 well not be faster than doing the ABS and one comparison.
13133 But ABS (X) <= C is a range comparison, which becomes a subtraction
13134 and a comparison, and is probably faster. */
13135 if (code == LE_EXPR
13136 && TREE_CODE (arg1) == INTEGER_CST
13137 && TREE_CODE (arg0) == ABS_EXPR
13138 && ! TREE_SIDE_EFFECTS (arg0)
13139 && (0 != (tem = negate_expr (arg1)))
13140 && TREE_CODE (tem) == INTEGER_CST
13141 && !TREE_OVERFLOW (tem))
13142 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13143 build2 (GE_EXPR, type,
13144 TREE_OPERAND (arg0, 0), tem),
13145 build2 (LE_EXPR, type,
13146 TREE_OPERAND (arg0, 0), arg1));
13148 /* Convert ABS_EXPR<x> >= 0 to true. */
13149 strict_overflow_p = false;
13150 if (code == GE_EXPR
13151 && (integer_zerop (arg1)
13152 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13153 && real_zerop (arg1)))
13154 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13156 if (strict_overflow_p)
13157 fold_overflow_warning (("assuming signed overflow does not occur "
13158 "when simplifying comparison of "
13159 "absolute value and zero"),
13160 WARN_STRICT_OVERFLOW_CONDITIONAL);
13161 return omit_one_operand (type, integer_one_node, arg0);
13164 /* Convert ABS_EXPR<x> < 0 to false. */
13165 strict_overflow_p = false;
13166 if (code == LT_EXPR
13167 && (integer_zerop (arg1) || real_zerop (arg1))
13168 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13170 if (strict_overflow_p)
13171 fold_overflow_warning (("assuming signed overflow does not occur "
13172 "when simplifying comparison of "
13173 "absolute value and zero"),
13174 WARN_STRICT_OVERFLOW_CONDITIONAL);
13175 return omit_one_operand (type, integer_zero_node, arg0);
13178 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13179 and similarly for >= into !=. */
13180 if ((code == LT_EXPR || code == GE_EXPR)
13181 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13182 && TREE_CODE (arg1) == LSHIFT_EXPR
13183 && integer_onep (TREE_OPERAND (arg1, 0)))
13184 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13185 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13186 TREE_OPERAND (arg1, 1)),
13187 build_int_cst (TREE_TYPE (arg0), 0));
13189 if ((code == LT_EXPR || code == GE_EXPR)
13190 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13191 && CONVERT_EXPR_P (arg1)
13192 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13193 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13194 return
13195 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13196 fold_convert (TREE_TYPE (arg0),
13197 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13198 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13199 1))),
13200 build_int_cst (TREE_TYPE (arg0), 0));
13202 return NULL_TREE;
13204 case UNORDERED_EXPR:
13205 case ORDERED_EXPR:
13206 case UNLT_EXPR:
13207 case UNLE_EXPR:
13208 case UNGT_EXPR:
13209 case UNGE_EXPR:
13210 case UNEQ_EXPR:
13211 case LTGT_EXPR:
13212 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13214 t1 = fold_relational_const (code, type, arg0, arg1);
13215 if (t1 != NULL_TREE)
13216 return t1;
13219 /* If the first operand is NaN, the result is constant. */
13220 if (TREE_CODE (arg0) == REAL_CST
13221 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13222 && (code != LTGT_EXPR || ! flag_trapping_math))
13224 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13225 ? integer_zero_node
13226 : integer_one_node;
13227 return omit_one_operand (type, t1, arg1);
13230 /* If the second operand is NaN, the result is constant. */
13231 if (TREE_CODE (arg1) == REAL_CST
13232 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13233 && (code != LTGT_EXPR || ! flag_trapping_math))
13235 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13236 ? integer_zero_node
13237 : integer_one_node;
13238 return omit_one_operand (type, t1, arg0);
13241 /* Simplify unordered comparison of something with itself. */
13242 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13243 && operand_equal_p (arg0, arg1, 0))
13244 return constant_boolean_node (1, type);
13246 if (code == LTGT_EXPR
13247 && !flag_trapping_math
13248 && operand_equal_p (arg0, arg1, 0))
13249 return constant_boolean_node (0, type);
13251 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13253 tree targ0 = strip_float_extensions (arg0);
13254 tree targ1 = strip_float_extensions (arg1);
13255 tree newtype = TREE_TYPE (targ0);
13257 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13258 newtype = TREE_TYPE (targ1);
13260 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13261 return fold_build2 (code, type, fold_convert (newtype, targ0),
13262 fold_convert (newtype, targ1));
13265 return NULL_TREE;
13267 case COMPOUND_EXPR:
13268 /* When pedantic, a compound expression can be neither an lvalue
13269 nor an integer constant expression. */
13270 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13271 return NULL_TREE;
13272 /* Don't let (0, 0) be null pointer constant. */
13273 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13274 : fold_convert (type, arg1);
13275 return pedantic_non_lvalue (tem);
13277 case COMPLEX_EXPR:
13278 if ((TREE_CODE (arg0) == REAL_CST
13279 && TREE_CODE (arg1) == REAL_CST)
13280 || (TREE_CODE (arg0) == INTEGER_CST
13281 && TREE_CODE (arg1) == INTEGER_CST))
13282 return build_complex (type, arg0, arg1);
13283 return NULL_TREE;
13285 case ASSERT_EXPR:
13286 /* An ASSERT_EXPR should never be passed to fold_binary. */
13287 gcc_unreachable ();
13289 default:
13290 return NULL_TREE;
13291 } /* switch (code) */
13294 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13295 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13296 of GOTO_EXPR. */
13298 static tree
13299 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13301 switch (TREE_CODE (*tp))
13303 case LABEL_EXPR:
13304 return *tp;
13306 case GOTO_EXPR:
13307 *walk_subtrees = 0;
13309 /* ... fall through ... */
13311 default:
13312 return NULL_TREE;
13316 /* Return whether the sub-tree ST contains a label which is accessible from
13317 outside the sub-tree. */
13319 static bool
13320 contains_label_p (tree st)
13322 return
13323 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13326 /* Fold a ternary expression of code CODE and type TYPE with operands
13327 OP0, OP1, and OP2. Return the folded expression if folding is
13328 successful. Otherwise, return NULL_TREE. */
13330 tree
13331 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13333 tree tem;
13334 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13335 enum tree_code_class kind = TREE_CODE_CLASS (code);
13337 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13338 && TREE_CODE_LENGTH (code) == 3);
13340 /* Strip any conversions that don't change the mode. This is safe
13341 for every expression, except for a comparison expression because
13342 its signedness is derived from its operands. So, in the latter
13343 case, only strip conversions that don't change the signedness.
13345 Note that this is done as an internal manipulation within the
13346 constant folder, in order to find the simplest representation of
13347 the arguments so that their form can be studied. In any cases,
13348 the appropriate type conversions should be put back in the tree
13349 that will get out of the constant folder. */
13350 if (op0)
13352 arg0 = op0;
13353 STRIP_NOPS (arg0);
13356 if (op1)
13358 arg1 = op1;
13359 STRIP_NOPS (arg1);
13362 switch (code)
13364 case COMPONENT_REF:
13365 if (TREE_CODE (arg0) == CONSTRUCTOR
13366 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13368 unsigned HOST_WIDE_INT idx;
13369 tree field, value;
13370 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13371 if (field == arg1)
13372 return value;
13374 return NULL_TREE;
13376 case COND_EXPR:
13377 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13378 so all simple results must be passed through pedantic_non_lvalue. */
13379 if (TREE_CODE (arg0) == INTEGER_CST)
13381 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13382 tem = integer_zerop (arg0) ? op2 : op1;
13383 /* Only optimize constant conditions when the selected branch
13384 has the same type as the COND_EXPR. This avoids optimizing
13385 away "c ? x : throw", where the throw has a void type.
13386 Avoid throwing away that operand which contains label. */
13387 if ((!TREE_SIDE_EFFECTS (unused_op)
13388 || !contains_label_p (unused_op))
13389 && (! VOID_TYPE_P (TREE_TYPE (tem))
13390 || VOID_TYPE_P (type)))
13391 return pedantic_non_lvalue (tem);
13392 return NULL_TREE;
13394 if (operand_equal_p (arg1, op2, 0))
13395 return pedantic_omit_one_operand (type, arg1, arg0);
13397 /* If we have A op B ? A : C, we may be able to convert this to a
13398 simpler expression, depending on the operation and the values
13399 of B and C. Signed zeros prevent all of these transformations,
13400 for reasons given above each one.
13402 Also try swapping the arguments and inverting the conditional. */
13403 if (COMPARISON_CLASS_P (arg0)
13404 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13405 arg1, TREE_OPERAND (arg0, 1))
13406 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13408 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13409 if (tem)
13410 return tem;
13413 if (COMPARISON_CLASS_P (arg0)
13414 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13415 op2,
13416 TREE_OPERAND (arg0, 1))
13417 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13419 tem = fold_truth_not_expr (arg0);
13420 if (tem && COMPARISON_CLASS_P (tem))
13422 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13423 if (tem)
13424 return tem;
13428 /* If the second operand is simpler than the third, swap them
13429 since that produces better jump optimization results. */
13430 if (truth_value_p (TREE_CODE (arg0))
13431 && tree_swap_operands_p (op1, op2, false))
13433 /* See if this can be inverted. If it can't, possibly because
13434 it was a floating-point inequality comparison, don't do
13435 anything. */
13436 tem = fold_truth_not_expr (arg0);
13437 if (tem)
13438 return fold_build3 (code, type, tem, op2, op1);
13441 /* Convert A ? 1 : 0 to simply A. */
13442 if (integer_onep (op1)
13443 && integer_zerop (op2)
13444 /* If we try to convert OP0 to our type, the
13445 call to fold will try to move the conversion inside
13446 a COND, which will recurse. In that case, the COND_EXPR
13447 is probably the best choice, so leave it alone. */
13448 && type == TREE_TYPE (arg0))
13449 return pedantic_non_lvalue (arg0);
13451 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13452 over COND_EXPR in cases such as floating point comparisons. */
13453 if (integer_zerop (op1)
13454 && integer_onep (op2)
13455 && truth_value_p (TREE_CODE (arg0)))
13456 return pedantic_non_lvalue (fold_convert (type,
13457 invert_truthvalue (arg0)));
13459 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13460 if (TREE_CODE (arg0) == LT_EXPR
13461 && integer_zerop (TREE_OPERAND (arg0, 1))
13462 && integer_zerop (op2)
13463 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13465 /* sign_bit_p only checks ARG1 bits within A's precision.
13466 If <sign bit of A> has wider type than A, bits outside
13467 of A's precision in <sign bit of A> need to be checked.
13468 If they are all 0, this optimization needs to be done
13469 in unsigned A's type, if they are all 1 in signed A's type,
13470 otherwise this can't be done. */
13471 if (TYPE_PRECISION (TREE_TYPE (tem))
13472 < TYPE_PRECISION (TREE_TYPE (arg1))
13473 && TYPE_PRECISION (TREE_TYPE (tem))
13474 < TYPE_PRECISION (type))
13476 unsigned HOST_WIDE_INT mask_lo;
13477 HOST_WIDE_INT mask_hi;
13478 int inner_width, outer_width;
13479 tree tem_type;
13481 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13482 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13483 if (outer_width > TYPE_PRECISION (type))
13484 outer_width = TYPE_PRECISION (type);
13486 if (outer_width > HOST_BITS_PER_WIDE_INT)
13488 mask_hi = ((unsigned HOST_WIDE_INT) -1
13489 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13490 mask_lo = -1;
13492 else
13494 mask_hi = 0;
13495 mask_lo = ((unsigned HOST_WIDE_INT) -1
13496 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13498 if (inner_width > HOST_BITS_PER_WIDE_INT)
13500 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13501 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13502 mask_lo = 0;
13504 else
13505 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13506 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13508 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13509 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13511 tem_type = signed_type_for (TREE_TYPE (tem));
13512 tem = fold_convert (tem_type, tem);
13514 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13515 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13517 tem_type = unsigned_type_for (TREE_TYPE (tem));
13518 tem = fold_convert (tem_type, tem);
13520 else
13521 tem = NULL;
13524 if (tem)
13525 return fold_convert (type,
13526 fold_build2 (BIT_AND_EXPR,
13527 TREE_TYPE (tem), tem,
13528 fold_convert (TREE_TYPE (tem),
13529 arg1)));
13532 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13533 already handled above. */
13534 if (TREE_CODE (arg0) == BIT_AND_EXPR
13535 && integer_onep (TREE_OPERAND (arg0, 1))
13536 && integer_zerop (op2)
13537 && integer_pow2p (arg1))
13539 tree tem = TREE_OPERAND (arg0, 0);
13540 STRIP_NOPS (tem);
13541 if (TREE_CODE (tem) == RSHIFT_EXPR
13542 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13543 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13544 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13545 return fold_build2 (BIT_AND_EXPR, type,
13546 TREE_OPERAND (tem, 0), arg1);
13549 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13550 is probably obsolete because the first operand should be a
13551 truth value (that's why we have the two cases above), but let's
13552 leave it in until we can confirm this for all front-ends. */
13553 if (integer_zerop (op2)
13554 && TREE_CODE (arg0) == NE_EXPR
13555 && integer_zerop (TREE_OPERAND (arg0, 1))
13556 && integer_pow2p (arg1)
13557 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13558 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13559 arg1, OEP_ONLY_CONST))
13560 return pedantic_non_lvalue (fold_convert (type,
13561 TREE_OPERAND (arg0, 0)));
13563 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13564 if (integer_zerop (op2)
13565 && truth_value_p (TREE_CODE (arg0))
13566 && truth_value_p (TREE_CODE (arg1)))
13567 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13568 fold_convert (type, arg0),
13569 arg1);
13571 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13572 if (integer_onep (op2)
13573 && truth_value_p (TREE_CODE (arg0))
13574 && truth_value_p (TREE_CODE (arg1)))
13576 /* Only perform transformation if ARG0 is easily inverted. */
13577 tem = fold_truth_not_expr (arg0);
13578 if (tem)
13579 return fold_build2 (TRUTH_ORIF_EXPR, type,
13580 fold_convert (type, tem),
13581 arg1);
13584 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13585 if (integer_zerop (arg1)
13586 && truth_value_p (TREE_CODE (arg0))
13587 && truth_value_p (TREE_CODE (op2)))
13589 /* Only perform transformation if ARG0 is easily inverted. */
13590 tem = fold_truth_not_expr (arg0);
13591 if (tem)
13592 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13593 fold_convert (type, tem),
13594 op2);
13597 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13598 if (integer_onep (arg1)
13599 && truth_value_p (TREE_CODE (arg0))
13600 && truth_value_p (TREE_CODE (op2)))
13601 return fold_build2 (TRUTH_ORIF_EXPR, type,
13602 fold_convert (type, arg0),
13603 op2);
13605 return NULL_TREE;
13607 case CALL_EXPR:
13608 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13609 of fold_ternary on them. */
13610 gcc_unreachable ();
13612 case BIT_FIELD_REF:
13613 if ((TREE_CODE (arg0) == VECTOR_CST
13614 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13615 && type == TREE_TYPE (TREE_TYPE (arg0)))
13617 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13618 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13620 if (width != 0
13621 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13622 && (idx % width) == 0
13623 && (idx = idx / width)
13624 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13626 tree elements = NULL_TREE;
13628 if (TREE_CODE (arg0) == VECTOR_CST)
13629 elements = TREE_VECTOR_CST_ELTS (arg0);
13630 else
13632 unsigned HOST_WIDE_INT idx;
13633 tree value;
13635 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13636 elements = tree_cons (NULL_TREE, value, elements);
13638 while (idx-- > 0 && elements)
13639 elements = TREE_CHAIN (elements);
13640 if (elements)
13641 return TREE_VALUE (elements);
13642 else
13643 return fold_convert (type, integer_zero_node);
13647 /* A bit-field-ref that referenced the full argument can be stripped. */
13648 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13649 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13650 && integer_zerop (op2))
13651 return fold_convert (type, arg0);
13653 return NULL_TREE;
13655 default:
13656 return NULL_TREE;
13657 } /* switch (code) */
13660 /* Perform constant folding and related simplification of EXPR.
13661 The related simplifications include x*1 => x, x*0 => 0, etc.,
13662 and application of the associative law.
13663 NOP_EXPR conversions may be removed freely (as long as we
13664 are careful not to change the type of the overall expression).
13665 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13666 but we can constant-fold them if they have constant operands. */
13668 #ifdef ENABLE_FOLD_CHECKING
13669 # define fold(x) fold_1 (x)
13670 static tree fold_1 (tree);
13671 static
13672 #endif
13673 tree
13674 fold (tree expr)
13676 const tree t = expr;
13677 enum tree_code code = TREE_CODE (t);
13678 enum tree_code_class kind = TREE_CODE_CLASS (code);
13679 tree tem;
13681 /* Return right away if a constant. */
13682 if (kind == tcc_constant)
13683 return t;
13685 /* CALL_EXPR-like objects with variable numbers of operands are
13686 treated specially. */
13687 if (kind == tcc_vl_exp)
13689 if (code == CALL_EXPR)
13691 tem = fold_call_expr (expr, false);
13692 return tem ? tem : expr;
13694 return expr;
13697 if (IS_EXPR_CODE_CLASS (kind))
13699 tree type = TREE_TYPE (t);
13700 tree op0, op1, op2;
13702 switch (TREE_CODE_LENGTH (code))
13704 case 1:
13705 op0 = TREE_OPERAND (t, 0);
13706 tem = fold_unary (code, type, op0);
13707 return tem ? tem : expr;
13708 case 2:
13709 op0 = TREE_OPERAND (t, 0);
13710 op1 = TREE_OPERAND (t, 1);
13711 tem = fold_binary (code, type, op0, op1);
13712 return tem ? tem : expr;
13713 case 3:
13714 op0 = TREE_OPERAND (t, 0);
13715 op1 = TREE_OPERAND (t, 1);
13716 op2 = TREE_OPERAND (t, 2);
13717 tem = fold_ternary (code, type, op0, op1, op2);
13718 return tem ? tem : expr;
13719 default:
13720 break;
13724 switch (code)
13726 case ARRAY_REF:
13728 tree op0 = TREE_OPERAND (t, 0);
13729 tree op1 = TREE_OPERAND (t, 1);
13731 if (TREE_CODE (op1) == INTEGER_CST
13732 && TREE_CODE (op0) == CONSTRUCTOR
13733 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13735 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13736 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13737 unsigned HOST_WIDE_INT begin = 0;
13739 /* Find a matching index by means of a binary search. */
13740 while (begin != end)
13742 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13743 tree index = VEC_index (constructor_elt, elts, middle)->index;
13745 if (TREE_CODE (index) == INTEGER_CST
13746 && tree_int_cst_lt (index, op1))
13747 begin = middle + 1;
13748 else if (TREE_CODE (index) == INTEGER_CST
13749 && tree_int_cst_lt (op1, index))
13750 end = middle;
13751 else if (TREE_CODE (index) == RANGE_EXPR
13752 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13753 begin = middle + 1;
13754 else if (TREE_CODE (index) == RANGE_EXPR
13755 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13756 end = middle;
13757 else
13758 return VEC_index (constructor_elt, elts, middle)->value;
13762 return t;
13765 case CONST_DECL:
13766 return fold (DECL_INITIAL (t));
13768 default:
13769 return t;
13770 } /* switch (code) */
13773 #ifdef ENABLE_FOLD_CHECKING
13774 #undef fold
13776 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13777 static void fold_check_failed (const_tree, const_tree);
13778 void print_fold_checksum (const_tree);
13780 /* When --enable-checking=fold, compute a digest of expr before
13781 and after actual fold call to see if fold did not accidentally
13782 change original expr. */
13784 tree
13785 fold (tree expr)
13787 tree ret;
13788 struct md5_ctx ctx;
13789 unsigned char checksum_before[16], checksum_after[16];
13790 htab_t ht;
13792 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13793 md5_init_ctx (&ctx);
13794 fold_checksum_tree (expr, &ctx, ht);
13795 md5_finish_ctx (&ctx, checksum_before);
13796 htab_empty (ht);
13798 ret = fold_1 (expr);
13800 md5_init_ctx (&ctx);
13801 fold_checksum_tree (expr, &ctx, ht);
13802 md5_finish_ctx (&ctx, checksum_after);
13803 htab_delete (ht);
13805 if (memcmp (checksum_before, checksum_after, 16))
13806 fold_check_failed (expr, ret);
13808 return ret;
13811 void
13812 print_fold_checksum (const_tree expr)
13814 struct md5_ctx ctx;
13815 unsigned char checksum[16], cnt;
13816 htab_t ht;
13818 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13819 md5_init_ctx (&ctx);
13820 fold_checksum_tree (expr, &ctx, ht);
13821 md5_finish_ctx (&ctx, checksum);
13822 htab_delete (ht);
13823 for (cnt = 0; cnt < 16; ++cnt)
13824 fprintf (stderr, "%02x", checksum[cnt]);
13825 putc ('\n', stderr);
13828 static void
13829 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13831 internal_error ("fold check: original tree changed by fold");
13834 static void
13835 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13837 const void **slot;
13838 enum tree_code code;
13839 union tree_node buf;
13840 int i, len;
13842 recursive_label:
13844 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13845 <= sizeof (struct tree_function_decl))
13846 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13847 if (expr == NULL)
13848 return;
13849 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13850 if (*slot != NULL)
13851 return;
13852 *slot = expr;
13853 code = TREE_CODE (expr);
13854 if (TREE_CODE_CLASS (code) == tcc_declaration
13855 && DECL_ASSEMBLER_NAME_SET_P (expr))
13857 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13858 memcpy ((char *) &buf, expr, tree_size (expr));
13859 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13860 expr = (tree) &buf;
13862 else if (TREE_CODE_CLASS (code) == tcc_type
13863 && (TYPE_POINTER_TO (expr)
13864 || TYPE_REFERENCE_TO (expr)
13865 || TYPE_CACHED_VALUES_P (expr)
13866 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13867 || TYPE_NEXT_VARIANT (expr)))
13869 /* Allow these fields to be modified. */
13870 tree tmp;
13871 memcpy ((char *) &buf, expr, tree_size (expr));
13872 expr = tmp = (tree) &buf;
13873 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13874 TYPE_POINTER_TO (tmp) = NULL;
13875 TYPE_REFERENCE_TO (tmp) = NULL;
13876 TYPE_NEXT_VARIANT (tmp) = NULL;
13877 if (TYPE_CACHED_VALUES_P (tmp))
13879 TYPE_CACHED_VALUES_P (tmp) = 0;
13880 TYPE_CACHED_VALUES (tmp) = NULL;
13883 md5_process_bytes (expr, tree_size (expr), ctx);
13884 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13885 if (TREE_CODE_CLASS (code) != tcc_type
13886 && TREE_CODE_CLASS (code) != tcc_declaration
13887 && code != TREE_LIST
13888 && code != SSA_NAME)
13889 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13890 switch (TREE_CODE_CLASS (code))
13892 case tcc_constant:
13893 switch (code)
13895 case STRING_CST:
13896 md5_process_bytes (TREE_STRING_POINTER (expr),
13897 TREE_STRING_LENGTH (expr), ctx);
13898 break;
13899 case COMPLEX_CST:
13900 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13901 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13902 break;
13903 case VECTOR_CST:
13904 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13905 break;
13906 default:
13907 break;
13909 break;
13910 case tcc_exceptional:
13911 switch (code)
13913 case TREE_LIST:
13914 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13915 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13916 expr = TREE_CHAIN (expr);
13917 goto recursive_label;
13918 break;
13919 case TREE_VEC:
13920 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13921 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13922 break;
13923 default:
13924 break;
13926 break;
13927 case tcc_expression:
13928 case tcc_reference:
13929 case tcc_comparison:
13930 case tcc_unary:
13931 case tcc_binary:
13932 case tcc_statement:
13933 case tcc_vl_exp:
13934 len = TREE_OPERAND_LENGTH (expr);
13935 for (i = 0; i < len; ++i)
13936 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13937 break;
13938 case tcc_declaration:
13939 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13940 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13941 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13943 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13944 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13945 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13946 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13947 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13949 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13950 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13952 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13954 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13955 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13956 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13958 break;
13959 case tcc_type:
13960 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13961 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13962 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13963 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13964 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13965 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13966 if (INTEGRAL_TYPE_P (expr)
13967 || SCALAR_FLOAT_TYPE_P (expr))
13969 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13970 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13972 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13973 if (TREE_CODE (expr) == RECORD_TYPE
13974 || TREE_CODE (expr) == UNION_TYPE
13975 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13976 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13977 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13978 break;
13979 default:
13980 break;
13984 /* Helper function for outputting the checksum of a tree T. When
13985 debugging with gdb, you can "define mynext" to be "next" followed
13986 by "call debug_fold_checksum (op0)", then just trace down till the
13987 outputs differ. */
13989 void
13990 debug_fold_checksum (const_tree t)
13992 int i;
13993 unsigned char checksum[16];
13994 struct md5_ctx ctx;
13995 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13997 md5_init_ctx (&ctx);
13998 fold_checksum_tree (t, &ctx, ht);
13999 md5_finish_ctx (&ctx, checksum);
14000 htab_empty (ht);
14002 for (i = 0; i < 16; i++)
14003 fprintf (stderr, "%d ", checksum[i]);
14005 fprintf (stderr, "\n");
14008 #endif
14010 /* Fold a unary tree expression with code CODE of type TYPE with an
14011 operand OP0. Return a folded expression if successful. Otherwise,
14012 return a tree expression with code CODE of type TYPE with an
14013 operand OP0. */
14015 tree
14016 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14018 tree tem;
14019 #ifdef ENABLE_FOLD_CHECKING
14020 unsigned char checksum_before[16], checksum_after[16];
14021 struct md5_ctx ctx;
14022 htab_t ht;
14024 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14025 md5_init_ctx (&ctx);
14026 fold_checksum_tree (op0, &ctx, ht);
14027 md5_finish_ctx (&ctx, checksum_before);
14028 htab_empty (ht);
14029 #endif
14031 tem = fold_unary (code, type, op0);
14032 if (!tem)
14033 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14035 #ifdef ENABLE_FOLD_CHECKING
14036 md5_init_ctx (&ctx);
14037 fold_checksum_tree (op0, &ctx, ht);
14038 md5_finish_ctx (&ctx, checksum_after);
14039 htab_delete (ht);
14041 if (memcmp (checksum_before, checksum_after, 16))
14042 fold_check_failed (op0, tem);
14043 #endif
14044 return tem;
14047 /* Fold a binary tree expression with code CODE of type TYPE with
14048 operands OP0 and OP1. Return a folded expression if successful.
14049 Otherwise, return a tree expression with code CODE of type TYPE
14050 with operands OP0 and OP1. */
14052 tree
14053 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
14054 MEM_STAT_DECL)
14056 tree tem;
14057 #ifdef ENABLE_FOLD_CHECKING
14058 unsigned char checksum_before_op0[16],
14059 checksum_before_op1[16],
14060 checksum_after_op0[16],
14061 checksum_after_op1[16];
14062 struct md5_ctx ctx;
14063 htab_t ht;
14065 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14066 md5_init_ctx (&ctx);
14067 fold_checksum_tree (op0, &ctx, ht);
14068 md5_finish_ctx (&ctx, checksum_before_op0);
14069 htab_empty (ht);
14071 md5_init_ctx (&ctx);
14072 fold_checksum_tree (op1, &ctx, ht);
14073 md5_finish_ctx (&ctx, checksum_before_op1);
14074 htab_empty (ht);
14075 #endif
14077 tem = fold_binary (code, type, op0, op1);
14078 if (!tem)
14079 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14081 #ifdef ENABLE_FOLD_CHECKING
14082 md5_init_ctx (&ctx);
14083 fold_checksum_tree (op0, &ctx, ht);
14084 md5_finish_ctx (&ctx, checksum_after_op0);
14085 htab_empty (ht);
14087 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14088 fold_check_failed (op0, tem);
14090 md5_init_ctx (&ctx);
14091 fold_checksum_tree (op1, &ctx, ht);
14092 md5_finish_ctx (&ctx, checksum_after_op1);
14093 htab_delete (ht);
14095 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14096 fold_check_failed (op1, tem);
14097 #endif
14098 return tem;
14101 /* Fold a ternary tree expression with code CODE of type TYPE with
14102 operands OP0, OP1, and OP2. Return a folded expression if
14103 successful. Otherwise, return a tree expression with code CODE of
14104 type TYPE with operands OP0, OP1, and OP2. */
14106 tree
14107 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
14108 MEM_STAT_DECL)
14110 tree tem;
14111 #ifdef ENABLE_FOLD_CHECKING
14112 unsigned char checksum_before_op0[16],
14113 checksum_before_op1[16],
14114 checksum_before_op2[16],
14115 checksum_after_op0[16],
14116 checksum_after_op1[16],
14117 checksum_after_op2[16];
14118 struct md5_ctx ctx;
14119 htab_t ht;
14121 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14122 md5_init_ctx (&ctx);
14123 fold_checksum_tree (op0, &ctx, ht);
14124 md5_finish_ctx (&ctx, checksum_before_op0);
14125 htab_empty (ht);
14127 md5_init_ctx (&ctx);
14128 fold_checksum_tree (op1, &ctx, ht);
14129 md5_finish_ctx (&ctx, checksum_before_op1);
14130 htab_empty (ht);
14132 md5_init_ctx (&ctx);
14133 fold_checksum_tree (op2, &ctx, ht);
14134 md5_finish_ctx (&ctx, checksum_before_op2);
14135 htab_empty (ht);
14136 #endif
14138 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14139 tem = fold_ternary (code, type, op0, op1, op2);
14140 if (!tem)
14141 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14143 #ifdef ENABLE_FOLD_CHECKING
14144 md5_init_ctx (&ctx);
14145 fold_checksum_tree (op0, &ctx, ht);
14146 md5_finish_ctx (&ctx, checksum_after_op0);
14147 htab_empty (ht);
14149 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14150 fold_check_failed (op0, tem);
14152 md5_init_ctx (&ctx);
14153 fold_checksum_tree (op1, &ctx, ht);
14154 md5_finish_ctx (&ctx, checksum_after_op1);
14155 htab_empty (ht);
14157 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14158 fold_check_failed (op1, tem);
14160 md5_init_ctx (&ctx);
14161 fold_checksum_tree (op2, &ctx, ht);
14162 md5_finish_ctx (&ctx, checksum_after_op2);
14163 htab_delete (ht);
14165 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14166 fold_check_failed (op2, tem);
14167 #endif
14168 return tem;
14171 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14172 arguments in ARGARRAY, and a null static chain.
14173 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14174 of type TYPE from the given operands as constructed by build_call_array. */
14176 tree
14177 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14179 tree tem;
14180 #ifdef ENABLE_FOLD_CHECKING
14181 unsigned char checksum_before_fn[16],
14182 checksum_before_arglist[16],
14183 checksum_after_fn[16],
14184 checksum_after_arglist[16];
14185 struct md5_ctx ctx;
14186 htab_t ht;
14187 int i;
14189 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14190 md5_init_ctx (&ctx);
14191 fold_checksum_tree (fn, &ctx, ht);
14192 md5_finish_ctx (&ctx, checksum_before_fn);
14193 htab_empty (ht);
14195 md5_init_ctx (&ctx);
14196 for (i = 0; i < nargs; i++)
14197 fold_checksum_tree (argarray[i], &ctx, ht);
14198 md5_finish_ctx (&ctx, checksum_before_arglist);
14199 htab_empty (ht);
14200 #endif
14202 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14204 #ifdef ENABLE_FOLD_CHECKING
14205 md5_init_ctx (&ctx);
14206 fold_checksum_tree (fn, &ctx, ht);
14207 md5_finish_ctx (&ctx, checksum_after_fn);
14208 htab_empty (ht);
14210 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14211 fold_check_failed (fn, tem);
14213 md5_init_ctx (&ctx);
14214 for (i = 0; i < nargs; i++)
14215 fold_checksum_tree (argarray[i], &ctx, ht);
14216 md5_finish_ctx (&ctx, checksum_after_arglist);
14217 htab_delete (ht);
14219 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14220 fold_check_failed (NULL_TREE, tem);
14221 #endif
14222 return tem;
14225 /* Perform constant folding and related simplification of initializer
14226 expression EXPR. These behave identically to "fold_buildN" but ignore
14227 potential run-time traps and exceptions that fold must preserve. */
14229 #define START_FOLD_INIT \
14230 int saved_signaling_nans = flag_signaling_nans;\
14231 int saved_trapping_math = flag_trapping_math;\
14232 int saved_rounding_math = flag_rounding_math;\
14233 int saved_trapv = flag_trapv;\
14234 int saved_folding_initializer = folding_initializer;\
14235 flag_signaling_nans = 0;\
14236 flag_trapping_math = 0;\
14237 flag_rounding_math = 0;\
14238 flag_trapv = 0;\
14239 folding_initializer = 1;
14241 #define END_FOLD_INIT \
14242 flag_signaling_nans = saved_signaling_nans;\
14243 flag_trapping_math = saved_trapping_math;\
14244 flag_rounding_math = saved_rounding_math;\
14245 flag_trapv = saved_trapv;\
14246 folding_initializer = saved_folding_initializer;
14248 tree
14249 fold_build1_initializer (enum tree_code code, tree type, tree op)
14251 tree result;
14252 START_FOLD_INIT;
14254 result = fold_build1 (code, type, op);
14256 END_FOLD_INIT;
14257 return result;
14260 tree
14261 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14263 tree result;
14264 START_FOLD_INIT;
14266 result = fold_build2 (code, type, op0, op1);
14268 END_FOLD_INIT;
14269 return result;
14272 tree
14273 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14274 tree op2)
14276 tree result;
14277 START_FOLD_INIT;
14279 result = fold_build3 (code, type, op0, op1, op2);
14281 END_FOLD_INIT;
14282 return result;
14285 tree
14286 fold_build_call_array_initializer (tree type, tree fn,
14287 int nargs, tree *argarray)
14289 tree result;
14290 START_FOLD_INIT;
14292 result = fold_build_call_array (type, fn, nargs, argarray);
14294 END_FOLD_INIT;
14295 return result;
14298 #undef START_FOLD_INIT
14299 #undef END_FOLD_INIT
14301 /* Determine if first argument is a multiple of second argument. Return 0 if
14302 it is not, or we cannot easily determined it to be.
14304 An example of the sort of thing we care about (at this point; this routine
14305 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14306 fold cases do now) is discovering that
14308 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14310 is a multiple of
14312 SAVE_EXPR (J * 8)
14314 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14316 This code also handles discovering that
14318 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14320 is a multiple of 8 so we don't have to worry about dealing with a
14321 possible remainder.
14323 Note that we *look* inside a SAVE_EXPR only to determine how it was
14324 calculated; it is not safe for fold to do much of anything else with the
14325 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14326 at run time. For example, the latter example above *cannot* be implemented
14327 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14328 evaluation time of the original SAVE_EXPR is not necessarily the same at
14329 the time the new expression is evaluated. The only optimization of this
14330 sort that would be valid is changing
14332 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14334 divided by 8 to
14336 SAVE_EXPR (I) * SAVE_EXPR (J)
14338 (where the same SAVE_EXPR (J) is used in the original and the
14339 transformed version). */
14342 multiple_of_p (tree type, const_tree top, const_tree bottom)
14344 if (operand_equal_p (top, bottom, 0))
14345 return 1;
14347 if (TREE_CODE (type) != INTEGER_TYPE)
14348 return 0;
14350 switch (TREE_CODE (top))
14352 case BIT_AND_EXPR:
14353 /* Bitwise and provides a power of two multiple. If the mask is
14354 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14355 if (!integer_pow2p (bottom))
14356 return 0;
14357 /* FALLTHRU */
14359 case MULT_EXPR:
14360 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14361 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14363 case PLUS_EXPR:
14364 case MINUS_EXPR:
14365 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14366 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14368 case LSHIFT_EXPR:
14369 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14371 tree op1, t1;
14373 op1 = TREE_OPERAND (top, 1);
14374 /* const_binop may not detect overflow correctly,
14375 so check for it explicitly here. */
14376 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14377 > TREE_INT_CST_LOW (op1)
14378 && TREE_INT_CST_HIGH (op1) == 0
14379 && 0 != (t1 = fold_convert (type,
14380 const_binop (LSHIFT_EXPR,
14381 size_one_node,
14382 op1, 0)))
14383 && !TREE_OVERFLOW (t1))
14384 return multiple_of_p (type, t1, bottom);
14386 return 0;
14388 case NOP_EXPR:
14389 /* Can't handle conversions from non-integral or wider integral type. */
14390 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14391 || (TYPE_PRECISION (type)
14392 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14393 return 0;
14395 /* .. fall through ... */
14397 case SAVE_EXPR:
14398 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14400 case INTEGER_CST:
14401 if (TREE_CODE (bottom) != INTEGER_CST
14402 || integer_zerop (bottom)
14403 || (TYPE_UNSIGNED (type)
14404 && (tree_int_cst_sgn (top) < 0
14405 || tree_int_cst_sgn (bottom) < 0)))
14406 return 0;
14407 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14408 top, bottom, 0));
14410 default:
14411 return 0;
14415 /* Return true if CODE or TYPE is known to be non-negative. */
14417 static bool
14418 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14420 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14421 && truth_value_p (code))
14422 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14423 have a signed:1 type (where the value is -1 and 0). */
14424 return true;
14425 return false;
14428 /* Return true if (CODE OP0) is known to be non-negative. If the return
14429 value is based on the assumption that signed overflow is undefined,
14430 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14431 *STRICT_OVERFLOW_P. */
14433 bool
14434 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14435 bool *strict_overflow_p)
14437 if (TYPE_UNSIGNED (type))
14438 return true;
14440 switch (code)
14442 case ABS_EXPR:
14443 /* We can't return 1 if flag_wrapv is set because
14444 ABS_EXPR<INT_MIN> = INT_MIN. */
14445 if (!INTEGRAL_TYPE_P (type))
14446 return true;
14447 break;
14449 case NON_LVALUE_EXPR:
14450 case FLOAT_EXPR:
14451 case FIX_TRUNC_EXPR:
14452 return tree_expr_nonnegative_warnv_p (op0,
14453 strict_overflow_p);
14455 CASE_CONVERT:
14457 tree inner_type = TREE_TYPE (op0);
14458 tree outer_type = type;
14460 if (TREE_CODE (outer_type) == REAL_TYPE)
14462 if (TREE_CODE (inner_type) == REAL_TYPE)
14463 return tree_expr_nonnegative_warnv_p (op0,
14464 strict_overflow_p);
14465 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14467 if (TYPE_UNSIGNED (inner_type))
14468 return true;
14469 return tree_expr_nonnegative_warnv_p (op0,
14470 strict_overflow_p);
14473 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14475 if (TREE_CODE (inner_type) == REAL_TYPE)
14476 return tree_expr_nonnegative_warnv_p (op0,
14477 strict_overflow_p);
14478 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14479 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14480 && TYPE_UNSIGNED (inner_type);
14483 break;
14485 default:
14486 return tree_simple_nonnegative_warnv_p (code, type);
14489 /* We don't know sign of `t', so be conservative and return false. */
14490 return false;
14493 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14494 value is based on the assumption that signed overflow is undefined,
14495 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14496 *STRICT_OVERFLOW_P. */
14498 bool
14499 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14500 tree op1, bool *strict_overflow_p)
14502 if (TYPE_UNSIGNED (type))
14503 return true;
14505 switch (code)
14507 case POINTER_PLUSNV_EXPR:
14508 case POINTER_PLUS_EXPR:
14509 /* Pointers do not have a "sign". */
14510 return false;
14512 case PLUSNV_EXPR:
14513 if (INTEGRAL_TYPE_P (type))
14515 *strict_overflow_p = true;
14516 return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14517 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));
14520 /* Fallthru. */
14521 case PLUS_EXPR:
14522 if (FLOAT_TYPE_P (type))
14523 return (tree_expr_nonnegative_warnv_p (op0,
14524 strict_overflow_p)
14525 && tree_expr_nonnegative_warnv_p (op1,
14526 strict_overflow_p));
14528 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14529 both unsigned and at least 2 bits shorter than the result. */
14530 if (TREE_CODE (type) == INTEGER_TYPE
14531 && TREE_CODE (op0) == NOP_EXPR
14532 && TREE_CODE (op1) == NOP_EXPR)
14534 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14535 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14536 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14537 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14539 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14540 TYPE_PRECISION (inner2)) + 1;
14541 return prec < TYPE_PRECISION (type);
14544 break;
14546 case MULTNV_EXPR:
14547 if (INTEGRAL_TYPE_P (type))
14549 *strict_overflow_p = true;
14550 /* x * x without overflowing is always non-negative. */
14551 if (operand_equal_p (op0, op1, 0))
14552 return true;
14553 return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14554 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));
14557 /* Fallthru. */
14558 case MULT_EXPR:
14559 if (FLOAT_TYPE_P (type))
14561 /* x * x for floating point x is always non-negative. */
14562 if (operand_equal_p (op0, op1, 0))
14563 return true;
14564 return (tree_expr_nonnegative_warnv_p (op0,
14565 strict_overflow_p)
14566 && tree_expr_nonnegative_warnv_p (op1,
14567 strict_overflow_p));
14570 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14571 both unsigned and their total bits is shorter than the result. */
14572 if (TREE_CODE (type) == INTEGER_TYPE
14573 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14574 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14576 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14577 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14578 : TREE_TYPE (op0);
14579 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14580 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14581 : TREE_TYPE (op1);
14583 bool unsigned0 = TYPE_UNSIGNED (inner0);
14584 bool unsigned1 = TYPE_UNSIGNED (inner1);
14586 if (TREE_CODE (op0) == INTEGER_CST)
14587 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14589 if (TREE_CODE (op1) == INTEGER_CST)
14590 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14592 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14593 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14595 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14596 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14597 : TYPE_PRECISION (inner0);
14599 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14600 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14601 : TYPE_PRECISION (inner1);
14603 return precision0 + precision1 < TYPE_PRECISION (type);
14606 return false;
14608 case BIT_AND_EXPR:
14609 case MAX_EXPR:
14610 return (tree_expr_nonnegative_warnv_p (op0,
14611 strict_overflow_p)
14612 || tree_expr_nonnegative_warnv_p (op1,
14613 strict_overflow_p));
14615 case BIT_IOR_EXPR:
14616 case BIT_XOR_EXPR:
14617 case MIN_EXPR:
14618 case RDIV_EXPR:
14619 case TRUNC_DIV_EXPR:
14620 case CEIL_DIV_EXPR:
14621 case FLOOR_DIV_EXPR:
14622 case ROUND_DIV_EXPR:
14623 return (tree_expr_nonnegative_warnv_p (op0,
14624 strict_overflow_p)
14625 && tree_expr_nonnegative_warnv_p (op1,
14626 strict_overflow_p));
14628 case TRUNC_MOD_EXPR:
14629 case CEIL_MOD_EXPR:
14630 case FLOOR_MOD_EXPR:
14631 case ROUND_MOD_EXPR:
14632 return tree_expr_nonnegative_warnv_p (op0,
14633 strict_overflow_p);
14634 default:
14635 return tree_simple_nonnegative_warnv_p (code, type);
14638 /* We don't know sign of `t', so be conservative and return false. */
14639 return false;
14642 /* Return true if T is known to be non-negative. If the return
14643 value is based on the assumption that signed overflow is undefined,
14644 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14645 *STRICT_OVERFLOW_P. */
14647 bool
14648 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14650 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14651 return true;
14653 switch (TREE_CODE (t))
14655 case INTEGER_CST:
14656 return tree_int_cst_sgn (t) >= 0;
14658 case REAL_CST:
14659 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14661 case FIXED_CST:
14662 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14664 case COND_EXPR:
14665 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14666 strict_overflow_p)
14667 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14668 strict_overflow_p));
14669 default:
14670 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14671 TREE_TYPE (t));
14673 /* We don't know sign of `t', so be conservative and return false. */
14674 return false;
14677 /* Return true if T is known to be non-negative. If the return
14678 value is based on the assumption that signed overflow is undefined,
14679 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14680 *STRICT_OVERFLOW_P. */
14682 bool
14683 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14684 tree arg0, tree arg1, bool *strict_overflow_p)
14686 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14687 switch (DECL_FUNCTION_CODE (fndecl))
14689 CASE_FLT_FN (BUILT_IN_ACOS):
14690 CASE_FLT_FN (BUILT_IN_ACOSH):
14691 CASE_FLT_FN (BUILT_IN_CABS):
14692 CASE_FLT_FN (BUILT_IN_COSH):
14693 CASE_FLT_FN (BUILT_IN_ERFC):
14694 CASE_FLT_FN (BUILT_IN_EXP):
14695 CASE_FLT_FN (BUILT_IN_EXP10):
14696 CASE_FLT_FN (BUILT_IN_EXP2):
14697 CASE_FLT_FN (BUILT_IN_FABS):
14698 CASE_FLT_FN (BUILT_IN_FDIM):
14699 CASE_FLT_FN (BUILT_IN_HYPOT):
14700 CASE_FLT_FN (BUILT_IN_POW10):
14701 CASE_INT_FN (BUILT_IN_FFS):
14702 CASE_INT_FN (BUILT_IN_PARITY):
14703 CASE_INT_FN (BUILT_IN_POPCOUNT):
14704 case BUILT_IN_BSWAP32:
14705 case BUILT_IN_BSWAP64:
14706 /* Always true. */
14707 return true;
14709 CASE_FLT_FN (BUILT_IN_SQRT):
14710 /* sqrt(-0.0) is -0.0. */
14711 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14712 return true;
14713 return tree_expr_nonnegative_warnv_p (arg0,
14714 strict_overflow_p);
14716 CASE_FLT_FN (BUILT_IN_ASINH):
14717 CASE_FLT_FN (BUILT_IN_ATAN):
14718 CASE_FLT_FN (BUILT_IN_ATANH):
14719 CASE_FLT_FN (BUILT_IN_CBRT):
14720 CASE_FLT_FN (BUILT_IN_CEIL):
14721 CASE_FLT_FN (BUILT_IN_ERF):
14722 CASE_FLT_FN (BUILT_IN_EXPM1):
14723 CASE_FLT_FN (BUILT_IN_FLOOR):
14724 CASE_FLT_FN (BUILT_IN_FMOD):
14725 CASE_FLT_FN (BUILT_IN_FREXP):
14726 CASE_FLT_FN (BUILT_IN_LCEIL):
14727 CASE_FLT_FN (BUILT_IN_LDEXP):
14728 CASE_FLT_FN (BUILT_IN_LFLOOR):
14729 CASE_FLT_FN (BUILT_IN_LLCEIL):
14730 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14731 CASE_FLT_FN (BUILT_IN_LLRINT):
14732 CASE_FLT_FN (BUILT_IN_LLROUND):
14733 CASE_FLT_FN (BUILT_IN_LRINT):
14734 CASE_FLT_FN (BUILT_IN_LROUND):
14735 CASE_FLT_FN (BUILT_IN_MODF):
14736 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14737 CASE_FLT_FN (BUILT_IN_RINT):
14738 CASE_FLT_FN (BUILT_IN_ROUND):
14739 CASE_FLT_FN (BUILT_IN_SCALB):
14740 CASE_FLT_FN (BUILT_IN_SCALBLN):
14741 CASE_FLT_FN (BUILT_IN_SCALBN):
14742 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14743 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14744 CASE_FLT_FN (BUILT_IN_SINH):
14745 CASE_FLT_FN (BUILT_IN_TANH):
14746 CASE_FLT_FN (BUILT_IN_TRUNC):
14747 /* True if the 1st argument is nonnegative. */
14748 return tree_expr_nonnegative_warnv_p (arg0,
14749 strict_overflow_p);
14751 CASE_FLT_FN (BUILT_IN_FMAX):
14752 /* True if the 1st OR 2nd arguments are nonnegative. */
14753 return (tree_expr_nonnegative_warnv_p (arg0,
14754 strict_overflow_p)
14755 || (tree_expr_nonnegative_warnv_p (arg1,
14756 strict_overflow_p)));
14758 CASE_FLT_FN (BUILT_IN_FMIN):
14759 /* True if the 1st AND 2nd arguments are nonnegative. */
14760 return (tree_expr_nonnegative_warnv_p (arg0,
14761 strict_overflow_p)
14762 && (tree_expr_nonnegative_warnv_p (arg1,
14763 strict_overflow_p)));
14765 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14766 /* True if the 2nd argument is nonnegative. */
14767 return tree_expr_nonnegative_warnv_p (arg1,
14768 strict_overflow_p);
14770 CASE_FLT_FN (BUILT_IN_POWI):
14771 /* True if the 1st argument is nonnegative or the second
14772 argument is an even integer. */
14773 if (TREE_CODE (arg1) == INTEGER_CST
14774 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14775 return true;
14776 return tree_expr_nonnegative_warnv_p (arg0,
14777 strict_overflow_p);
14779 CASE_FLT_FN (BUILT_IN_POW):
14780 /* True if the 1st argument is nonnegative or the second
14781 argument is an even integer valued real. */
14782 if (TREE_CODE (arg1) == REAL_CST)
14784 REAL_VALUE_TYPE c;
14785 HOST_WIDE_INT n;
14787 c = TREE_REAL_CST (arg1);
14788 n = real_to_integer (&c);
14789 if ((n & 1) == 0)
14791 REAL_VALUE_TYPE cint;
14792 real_from_integer (&cint, VOIDmode, n,
14793 n < 0 ? -1 : 0, 0);
14794 if (real_identical (&c, &cint))
14795 return true;
14798 return tree_expr_nonnegative_warnv_p (arg0,
14799 strict_overflow_p);
14801 default:
14802 break;
14804 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14805 type);
14808 /* Return true if T is known to be non-negative. If the return
14809 value is based on the assumption that signed overflow is undefined,
14810 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14811 *STRICT_OVERFLOW_P. */
14813 bool
14814 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14816 enum tree_code code = TREE_CODE (t);
14817 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14818 return true;
14820 switch (code)
14822 case TARGET_EXPR:
14824 tree temp = TARGET_EXPR_SLOT (t);
14825 t = TARGET_EXPR_INITIAL (t);
14827 /* If the initializer is non-void, then it's a normal expression
14828 that will be assigned to the slot. */
14829 if (!VOID_TYPE_P (t))
14830 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14832 /* Otherwise, the initializer sets the slot in some way. One common
14833 way is an assignment statement at the end of the initializer. */
14834 while (1)
14836 if (TREE_CODE (t) == BIND_EXPR)
14837 t = expr_last (BIND_EXPR_BODY (t));
14838 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14839 || TREE_CODE (t) == TRY_CATCH_EXPR)
14840 t = expr_last (TREE_OPERAND (t, 0));
14841 else if (TREE_CODE (t) == STATEMENT_LIST)
14842 t = expr_last (t);
14843 else
14844 break;
14846 if (TREE_CODE (t) == MODIFY_EXPR
14847 && TREE_OPERAND (t, 0) == temp)
14848 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14849 strict_overflow_p);
14851 return false;
14854 case CALL_EXPR:
14856 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14857 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14859 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14860 get_callee_fndecl (t),
14861 arg0,
14862 arg1,
14863 strict_overflow_p);
14865 case COMPOUND_EXPR:
14866 case MODIFY_EXPR:
14867 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14868 strict_overflow_p);
14869 case BIND_EXPR:
14870 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14871 strict_overflow_p);
14872 case SAVE_EXPR:
14873 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14874 strict_overflow_p);
14876 default:
14877 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14878 TREE_TYPE (t));
14881 /* We don't know sign of `t', so be conservative and return false. */
14882 return false;
14885 /* Return true if T is known to be non-negative. If the return
14886 value is based on the assumption that signed overflow is undefined,
14887 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14888 *STRICT_OVERFLOW_P. */
14890 bool
14891 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14893 enum tree_code code;
14894 if (t == error_mark_node)
14895 return false;
14897 code = TREE_CODE (t);
14898 switch (TREE_CODE_CLASS (code))
14900 case tcc_binary:
14901 case tcc_comparison:
14902 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14903 TREE_TYPE (t),
14904 TREE_OPERAND (t, 0),
14905 TREE_OPERAND (t, 1),
14906 strict_overflow_p);
14908 case tcc_unary:
14909 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14910 TREE_TYPE (t),
14911 TREE_OPERAND (t, 0),
14912 strict_overflow_p);
14914 case tcc_constant:
14915 case tcc_declaration:
14916 case tcc_reference:
14917 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14919 default:
14920 break;
14923 switch (code)
14925 case TRUTH_AND_EXPR:
14926 case TRUTH_OR_EXPR:
14927 case TRUTH_XOR_EXPR:
14928 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14929 TREE_TYPE (t),
14930 TREE_OPERAND (t, 0),
14931 TREE_OPERAND (t, 1),
14932 strict_overflow_p);
14933 case TRUTH_NOT_EXPR:
14934 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14935 TREE_TYPE (t),
14936 TREE_OPERAND (t, 0),
14937 strict_overflow_p);
14939 case COND_EXPR:
14940 case CONSTRUCTOR:
14941 case OBJ_TYPE_REF:
14942 case ASSERT_EXPR:
14943 case ADDR_EXPR:
14944 case WITH_SIZE_EXPR:
14945 case EXC_PTR_EXPR:
14946 case SSA_NAME:
14947 case FILTER_EXPR:
14948 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14950 default:
14951 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14955 /* Return true if `t' is known to be non-negative. Handle warnings
14956 about undefined signed overflow. */
14958 bool
14959 tree_expr_nonnegative_p (tree t)
14961 bool ret, strict_overflow_p;
14963 strict_overflow_p = false;
14964 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14965 if (strict_overflow_p)
14966 fold_overflow_warning (("assuming signed overflow does not occur when "
14967 "determining that expression is always "
14968 "non-negative"),
14969 WARN_STRICT_OVERFLOW_MISC);
14970 return ret;
14974 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14975 For floating point we further ensure that T is not denormal.
14976 Similar logic is present in nonzero_address in rtlanal.h.
14978 If the return value is based on the assumption that signed overflow
14979 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14980 change *STRICT_OVERFLOW_P. */
14982 bool
14983 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14984 bool *strict_overflow_p)
14986 switch (code)
14988 case ABS_EXPR:
14989 return tree_expr_nonzero_warnv_p (op0,
14990 strict_overflow_p);
14992 case NOP_EXPR:
14994 tree inner_type = TREE_TYPE (op0);
14995 tree outer_type = type;
14997 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14998 && tree_expr_nonzero_warnv_p (op0,
14999 strict_overflow_p));
15001 break;
15003 case NON_LVALUE_EXPR:
15004 return tree_expr_nonzero_warnv_p (op0,
15005 strict_overflow_p);
15007 default:
15008 break;
15011 return false;
15014 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15015 For floating point we further ensure that T is not denormal.
15016 Similar logic is present in nonzero_address in rtlanal.h.
15018 If the return value is based on the assumption that signed overflow
15019 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15020 change *STRICT_OVERFLOW_P. */
15022 bool
15023 tree_binary_nonzero_warnv_p (enum tree_code code,
15024 tree type ATTRIBUTE_UNUSED,
15025 tree op0,
15026 tree op1, bool *strict_overflow_p)
15028 bool sub_strict_overflow_p;
15029 switch (code)
15031 case POINTER_PLUS_EXPR:
15032 case PLUS_EXPR:
15033 if (TYPE_OVERFLOW_UNDEFINED (type))
15035 /* With the presence of negative values it is hard
15036 to say something. */
15037 sub_strict_overflow_p = false;
15038 if (!tree_expr_nonnegative_warnv_p (op0,
15039 &sub_strict_overflow_p)
15040 || !tree_expr_nonnegative_warnv_p (op1,
15041 &sub_strict_overflow_p))
15042 return false;
15043 /* One of operands must be positive and the other non-negative. */
15044 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15045 overflows, on a twos-complement machine the sum of two
15046 nonnegative numbers can never be zero. */
15047 return (tree_expr_nonzero_warnv_p (op0,
15048 strict_overflow_p)
15049 || tree_expr_nonzero_warnv_p (op1,
15050 strict_overflow_p));
15052 break;
15054 case MULT_EXPR:
15055 if (TYPE_OVERFLOW_UNDEFINED (type))
15057 if (tree_expr_nonzero_warnv_p (op0,
15058 strict_overflow_p)
15059 && tree_expr_nonzero_warnv_p (op1,
15060 strict_overflow_p))
15062 *strict_overflow_p = true;
15063 return true;
15066 break;
15068 case MIN_EXPR:
15069 sub_strict_overflow_p = false;
15070 if (tree_expr_nonzero_warnv_p (op0,
15071 &sub_strict_overflow_p)
15072 && tree_expr_nonzero_warnv_p (op1,
15073 &sub_strict_overflow_p))
15075 if (sub_strict_overflow_p)
15076 *strict_overflow_p = true;
15078 break;
15080 case MAX_EXPR:
15081 sub_strict_overflow_p = false;
15082 if (tree_expr_nonzero_warnv_p (op0,
15083 &sub_strict_overflow_p))
15085 if (sub_strict_overflow_p)
15086 *strict_overflow_p = true;
15088 /* When both operands are nonzero, then MAX must be too. */
15089 if (tree_expr_nonzero_warnv_p (op1,
15090 strict_overflow_p))
15091 return true;
15093 /* MAX where operand 0 is positive is positive. */
15094 return tree_expr_nonnegative_warnv_p (op0,
15095 strict_overflow_p);
15097 /* MAX where operand 1 is positive is positive. */
15098 else if (tree_expr_nonzero_warnv_p (op1,
15099 &sub_strict_overflow_p)
15100 && tree_expr_nonnegative_warnv_p (op1,
15101 &sub_strict_overflow_p))
15103 if (sub_strict_overflow_p)
15104 *strict_overflow_p = true;
15105 return true;
15107 break;
15109 case BIT_IOR_EXPR:
15110 return (tree_expr_nonzero_warnv_p (op1,
15111 strict_overflow_p)
15112 || tree_expr_nonzero_warnv_p (op0,
15113 strict_overflow_p));
15115 default:
15116 break;
15119 return false;
15122 /* Return true when T is an address and is known to be nonzero.
15123 For floating point we further ensure that T is not denormal.
15124 Similar logic is present in nonzero_address in rtlanal.h.
15126 If the return value is based on the assumption that signed overflow
15127 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15128 change *STRICT_OVERFLOW_P. */
15130 bool
15131 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15133 bool sub_strict_overflow_p;
15134 switch (TREE_CODE (t))
15136 case INTEGER_CST:
15137 return !integer_zerop (t);
15139 case ADDR_EXPR:
15141 tree base = get_base_address (TREE_OPERAND (t, 0));
15143 if (!base)
15144 return false;
15146 /* Weak declarations may link to NULL. Other things may also be NULL
15147 so protect with -fdelete-null-pointer-checks; but not variables
15148 allocated on the stack. */
15149 if (DECL_P (base)
15150 && (flag_delete_null_pointer_checks
15151 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15152 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15154 /* Constants are never weak. */
15155 if (CONSTANT_CLASS_P (base))
15156 return true;
15158 return false;
15161 case COND_EXPR:
15162 sub_strict_overflow_p = false;
15163 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15164 &sub_strict_overflow_p)
15165 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15166 &sub_strict_overflow_p))
15168 if (sub_strict_overflow_p)
15169 *strict_overflow_p = true;
15170 return true;
15172 break;
15174 default:
15175 break;
15177 return false;
15180 /* Return true when T is an address and is known to be nonzero.
15181 For floating point we further ensure that T is not denormal.
15182 Similar logic is present in nonzero_address in rtlanal.h.
15184 If the return value is based on the assumption that signed overflow
15185 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15186 change *STRICT_OVERFLOW_P. */
15188 bool
15189 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15191 tree type = TREE_TYPE (t);
15192 enum tree_code code;
15194 /* Doing something useful for floating point would need more work. */
15195 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15196 return false;
15198 code = TREE_CODE (t);
15199 switch (TREE_CODE_CLASS (code))
15201 case tcc_unary:
15202 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15203 strict_overflow_p);
15204 case tcc_binary:
15205 case tcc_comparison:
15206 return tree_binary_nonzero_warnv_p (code, type,
15207 TREE_OPERAND (t, 0),
15208 TREE_OPERAND (t, 1),
15209 strict_overflow_p);
15210 case tcc_constant:
15211 case tcc_declaration:
15212 case tcc_reference:
15213 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15215 default:
15216 break;
15219 switch (code)
15221 case TRUTH_NOT_EXPR:
15222 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15223 strict_overflow_p);
15225 case TRUTH_AND_EXPR:
15226 case TRUTH_OR_EXPR:
15227 case TRUTH_XOR_EXPR:
15228 return tree_binary_nonzero_warnv_p (code, type,
15229 TREE_OPERAND (t, 0),
15230 TREE_OPERAND (t, 1),
15231 strict_overflow_p);
15233 case COND_EXPR:
15234 case CONSTRUCTOR:
15235 case OBJ_TYPE_REF:
15236 case ASSERT_EXPR:
15237 case ADDR_EXPR:
15238 case WITH_SIZE_EXPR:
15239 case EXC_PTR_EXPR:
15240 case SSA_NAME:
15241 case FILTER_EXPR:
15242 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15244 case COMPOUND_EXPR:
15245 case MODIFY_EXPR:
15246 case BIND_EXPR:
15247 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15248 strict_overflow_p);
15250 case SAVE_EXPR:
15251 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15252 strict_overflow_p);
15254 case CALL_EXPR:
15255 return alloca_call_p (t);
15257 default:
15258 break;
15260 return false;
15263 /* Return true when T is an address and is known to be nonzero.
15264 Handle warnings about undefined signed overflow. */
15266 bool
15267 tree_expr_nonzero_p (tree t)
15269 bool ret, strict_overflow_p;
15271 strict_overflow_p = false;
15272 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15273 if (strict_overflow_p)
15274 fold_overflow_warning (("assuming signed overflow does not occur when "
15275 "determining that expression is always "
15276 "non-zero"),
15277 WARN_STRICT_OVERFLOW_MISC);
15278 return ret;
15281 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15282 attempt to fold the expression to a constant without modifying TYPE,
15283 OP0 or OP1.
15285 If the expression could be simplified to a constant, then return
15286 the constant. If the expression would not be simplified to a
15287 constant, then return NULL_TREE. */
15289 tree
15290 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15292 tree tem = fold_binary (code, type, op0, op1);
15293 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15296 /* Given the components of a unary expression CODE, TYPE and OP0,
15297 attempt to fold the expression to a constant without modifying
15298 TYPE or OP0.
15300 If the expression could be simplified to a constant, then return
15301 the constant. If the expression would not be simplified to a
15302 constant, then return NULL_TREE. */
15304 tree
15305 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15307 tree tem = fold_unary (code, type, op0);
15308 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15311 /* If EXP represents referencing an element in a constant string
15312 (either via pointer arithmetic or array indexing), return the
15313 tree representing the value accessed, otherwise return NULL. */
15315 tree
15316 fold_read_from_constant_string (tree exp)
15318 if ((TREE_CODE (exp) == INDIRECT_REF
15319 || TREE_CODE (exp) == ARRAY_REF)
15320 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15322 tree exp1 = TREE_OPERAND (exp, 0);
15323 tree index;
15324 tree string;
15326 if (TREE_CODE (exp) == INDIRECT_REF)
15327 string = string_constant (exp1, &index);
15328 else
15330 tree low_bound = array_ref_low_bound (exp);
15331 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15333 /* Optimize the special-case of a zero lower bound.
15335 We convert the low_bound to sizetype to avoid some problems
15336 with constant folding. (E.g. suppose the lower bound is 1,
15337 and its mode is QI. Without the conversion,l (ARRAY
15338 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15339 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15340 if (! integer_zerop (low_bound))
15341 index = size_diffop (index, fold_convert (sizetype, low_bound));
15343 string = exp1;
15346 if (string
15347 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15348 && TREE_CODE (string) == STRING_CST
15349 && TREE_CODE (index) == INTEGER_CST
15350 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15351 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15352 == MODE_INT)
15353 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15354 return build_int_cst_type (TREE_TYPE (exp),
15355 (TREE_STRING_POINTER (string)
15356 [TREE_INT_CST_LOW (index)]));
15358 return NULL;
15361 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15362 an integer constant, real, or fixed-point constant.
15364 TYPE is the type of the result. */
15366 static tree
15367 fold_negate_const (tree arg0, tree type)
15369 tree t = NULL_TREE;
15371 switch (TREE_CODE (arg0))
15373 case INTEGER_CST:
15375 unsigned HOST_WIDE_INT low;
15376 HOST_WIDE_INT high;
15377 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15378 TREE_INT_CST_HIGH (arg0),
15379 &low, &high);
15380 t = force_fit_type_double (type, low, high, 1,
15381 (overflow | TREE_OVERFLOW (arg0))
15382 && !TYPE_UNSIGNED (type));
15383 break;
15386 case REAL_CST:
15387 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15388 break;
15390 case FIXED_CST:
15392 FIXED_VALUE_TYPE f;
15393 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15394 &(TREE_FIXED_CST (arg0)), NULL,
15395 TYPE_SATURATING (type));
15396 t = build_fixed (type, f);
15397 /* Propagate overflow flags. */
15398 if (overflow_p | TREE_OVERFLOW (arg0))
15399 TREE_OVERFLOW (t) = 1;
15400 break;
15403 default:
15404 gcc_unreachable ();
15407 return t;
15410 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15411 an integer constant or real constant.
15413 TYPE is the type of the result. */
15415 tree
15416 fold_abs_const (tree arg0, tree type)
15418 tree t = NULL_TREE;
15420 switch (TREE_CODE (arg0))
15422 case INTEGER_CST:
15423 /* If the value is unsigned, then the absolute value is
15424 the same as the ordinary value. */
15425 if (TYPE_UNSIGNED (type))
15426 t = arg0;
15427 /* Similarly, if the value is non-negative. */
15428 else if (INT_CST_LT (integer_minus_one_node, arg0))
15429 t = arg0;
15430 /* If the value is negative, then the absolute value is
15431 its negation. */
15432 else
15434 unsigned HOST_WIDE_INT low;
15435 HOST_WIDE_INT high;
15436 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15437 TREE_INT_CST_HIGH (arg0),
15438 &low, &high);
15439 t = force_fit_type_double (type, low, high, -1,
15440 overflow | TREE_OVERFLOW (arg0));
15442 break;
15444 case REAL_CST:
15445 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15446 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15447 else
15448 t = arg0;
15449 break;
15451 default:
15452 gcc_unreachable ();
15455 return t;
15458 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15459 constant. TYPE is the type of the result. */
15461 static tree
15462 fold_not_const (tree arg0, tree type)
15464 tree t = NULL_TREE;
15466 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15468 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15469 ~TREE_INT_CST_HIGH (arg0), 0,
15470 TREE_OVERFLOW (arg0));
15472 return t;
15475 /* Given CODE, a relational operator, the target type, TYPE and two
15476 constant operands OP0 and OP1, return the result of the
15477 relational operation. If the result is not a compile time
15478 constant, then return NULL_TREE. */
15480 static tree
15481 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15483 int result, invert;
15485 /* From here on, the only cases we handle are when the result is
15486 known to be a constant. */
15488 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15490 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15491 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15493 /* Handle the cases where either operand is a NaN. */
15494 if (real_isnan (c0) || real_isnan (c1))
15496 switch (code)
15498 case EQ_EXPR:
15499 case ORDERED_EXPR:
15500 result = 0;
15501 break;
15503 case NE_EXPR:
15504 case UNORDERED_EXPR:
15505 case UNLT_EXPR:
15506 case UNLE_EXPR:
15507 case UNGT_EXPR:
15508 case UNGE_EXPR:
15509 case UNEQ_EXPR:
15510 result = 1;
15511 break;
15513 case LT_EXPR:
15514 case LE_EXPR:
15515 case GT_EXPR:
15516 case GE_EXPR:
15517 case LTGT_EXPR:
15518 if (flag_trapping_math)
15519 return NULL_TREE;
15520 result = 0;
15521 break;
15523 default:
15524 gcc_unreachable ();
15527 return constant_boolean_node (result, type);
15530 return constant_boolean_node (real_compare (code, c0, c1), type);
15533 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15535 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15536 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15537 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15540 /* Handle equality/inequality of complex constants. */
15541 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15543 tree rcond = fold_relational_const (code, type,
15544 TREE_REALPART (op0),
15545 TREE_REALPART (op1));
15546 tree icond = fold_relational_const (code, type,
15547 TREE_IMAGPART (op0),
15548 TREE_IMAGPART (op1));
15549 if (code == EQ_EXPR)
15550 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15551 else if (code == NE_EXPR)
15552 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15553 else
15554 return NULL_TREE;
15557 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15559 To compute GT, swap the arguments and do LT.
15560 To compute GE, do LT and invert the result.
15561 To compute LE, swap the arguments, do LT and invert the result.
15562 To compute NE, do EQ and invert the result.
15564 Therefore, the code below must handle only EQ and LT. */
15566 if (code == LE_EXPR || code == GT_EXPR)
15568 tree tem = op0;
15569 op0 = op1;
15570 op1 = tem;
15571 code = swap_tree_comparison (code);
15574 /* Note that it is safe to invert for real values here because we
15575 have already handled the one case that it matters. */
15577 invert = 0;
15578 if (code == NE_EXPR || code == GE_EXPR)
15580 invert = 1;
15581 code = invert_tree_comparison (code, false);
15584 /* Compute a result for LT or EQ if args permit;
15585 Otherwise return T. */
15586 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15588 if (code == EQ_EXPR)
15589 result = tree_int_cst_equal (op0, op1);
15590 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15591 result = INT_CST_LT_UNSIGNED (op0, op1);
15592 else
15593 result = INT_CST_LT (op0, op1);
15595 else
15596 return NULL_TREE;
15598 if (invert)
15599 result ^= 1;
15600 return constant_boolean_node (result, type);
15603 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15604 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15605 itself. */
15607 tree
15608 fold_build_cleanup_point_expr (tree type, tree expr)
15610 /* If the expression does not have side effects then we don't have to wrap
15611 it with a cleanup point expression. */
15612 if (!TREE_SIDE_EFFECTS (expr))
15613 return expr;
15615 /* If the expression is a return, check to see if the expression inside the
15616 return has no side effects or the right hand side of the modify expression
15617 inside the return. If either don't have side effects set we don't need to
15618 wrap the expression in a cleanup point expression. Note we don't check the
15619 left hand side of the modify because it should always be a return decl. */
15620 if (TREE_CODE (expr) == RETURN_EXPR)
15622 tree op = TREE_OPERAND (expr, 0);
15623 if (!op || !TREE_SIDE_EFFECTS (op))
15624 return expr;
15625 op = TREE_OPERAND (op, 1);
15626 if (!TREE_SIDE_EFFECTS (op))
15627 return expr;
15630 return build1 (CLEANUP_POINT_EXPR, type, expr);
15633 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15634 of an indirection through OP0, or NULL_TREE if no simplification is
15635 possible. */
15637 tree
15638 fold_indirect_ref_1 (tree type, tree op0)
15640 tree sub = op0;
15641 tree subtype;
15643 STRIP_NOPS (sub);
15644 subtype = TREE_TYPE (sub);
15645 if (!POINTER_TYPE_P (subtype))
15646 return NULL_TREE;
15648 if (TREE_CODE (sub) == ADDR_EXPR)
15650 tree op = TREE_OPERAND (sub, 0);
15651 tree optype = TREE_TYPE (op);
15652 /* *&CONST_DECL -> to the value of the const decl. */
15653 if (TREE_CODE (op) == CONST_DECL)
15654 return DECL_INITIAL (op);
15655 /* *&p => p; make sure to handle *&"str"[cst] here. */
15656 if (type == optype)
15658 tree fop = fold_read_from_constant_string (op);
15659 if (fop)
15660 return fop;
15661 else
15662 return op;
15664 /* *(foo *)&fooarray => fooarray[0] */
15665 else if (TREE_CODE (optype) == ARRAY_TYPE
15666 && type == TREE_TYPE (optype))
15668 tree type_domain = TYPE_DOMAIN (optype);
15669 tree min_val = size_zero_node;
15670 if (type_domain && TYPE_MIN_VALUE (type_domain))
15671 min_val = TYPE_MIN_VALUE (type_domain);
15672 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15674 /* *(foo *)&complexfoo => __real__ complexfoo */
15675 else if (TREE_CODE (optype) == COMPLEX_TYPE
15676 && type == TREE_TYPE (optype))
15677 return fold_build1 (REALPART_EXPR, type, op);
15678 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15679 else if (TREE_CODE (optype) == VECTOR_TYPE
15680 && type == TREE_TYPE (optype))
15682 tree part_width = TYPE_SIZE (type);
15683 tree index = bitsize_int (0);
15684 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15688 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15689 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15690 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15692 tree op00 = TREE_OPERAND (sub, 0);
15693 tree op01 = TREE_OPERAND (sub, 1);
15694 tree op00type;
15696 STRIP_NOPS (op00);
15697 op00type = TREE_TYPE (op00);
15698 if (TREE_CODE (op00) == ADDR_EXPR
15699 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15700 && type == TREE_TYPE (TREE_TYPE (op00type)))
15702 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15703 tree part_width = TYPE_SIZE (type);
15704 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15705 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15706 tree index = bitsize_int (indexi);
15708 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15709 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15710 part_width, index);
15716 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15717 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15718 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15720 tree op00 = TREE_OPERAND (sub, 0);
15721 tree op01 = TREE_OPERAND (sub, 1);
15722 tree op00type;
15724 STRIP_NOPS (op00);
15725 op00type = TREE_TYPE (op00);
15726 if (TREE_CODE (op00) == ADDR_EXPR
15727 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15728 && type == TREE_TYPE (TREE_TYPE (op00type)))
15730 tree size = TYPE_SIZE_UNIT (type);
15731 if (tree_int_cst_equal (size, op01))
15732 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15736 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15737 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15738 && type == TREE_TYPE (TREE_TYPE (subtype)))
15740 tree type_domain;
15741 tree min_val = size_zero_node;
15742 sub = build_fold_indirect_ref (sub);
15743 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15744 if (type_domain && TYPE_MIN_VALUE (type_domain))
15745 min_val = TYPE_MIN_VALUE (type_domain);
15746 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15749 return NULL_TREE;
15752 /* Builds an expression for an indirection through T, simplifying some
15753 cases. */
15755 tree
15756 build_fold_indirect_ref (tree t)
15758 tree type = TREE_TYPE (TREE_TYPE (t));
15759 tree sub = fold_indirect_ref_1 (type, t);
15761 if (sub)
15762 return sub;
15763 else
15764 return build1 (INDIRECT_REF, type, t);
15767 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15769 tree
15770 fold_indirect_ref (tree t)
15772 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15774 if (sub)
15775 return sub;
15776 else
15777 return t;
15780 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15781 whose result is ignored. The type of the returned tree need not be
15782 the same as the original expression. */
15784 tree
15785 fold_ignored_result (tree t)
15787 if (!TREE_SIDE_EFFECTS (t))
15788 return integer_zero_node;
15790 for (;;)
15791 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15793 case tcc_unary:
15794 t = TREE_OPERAND (t, 0);
15795 break;
15797 case tcc_binary:
15798 case tcc_comparison:
15799 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15800 t = TREE_OPERAND (t, 0);
15801 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15802 t = TREE_OPERAND (t, 1);
15803 else
15804 return t;
15805 break;
15807 case tcc_expression:
15808 switch (TREE_CODE (t))
15810 case COMPOUND_EXPR:
15811 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15812 return t;
15813 t = TREE_OPERAND (t, 0);
15814 break;
15816 case COND_EXPR:
15817 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15818 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15819 return t;
15820 t = TREE_OPERAND (t, 0);
15821 break;
15823 default:
15824 return t;
15826 break;
15828 default:
15829 return t;
15833 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15834 This can only be applied to objects of a sizetype. */
15836 tree
15837 round_up (tree value, int divisor)
15839 tree div = NULL_TREE;
15841 gcc_assert (divisor > 0);
15842 if (divisor == 1)
15843 return value;
15845 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15846 have to do anything. Only do this when we are not given a const,
15847 because in that case, this check is more expensive than just
15848 doing it. */
15849 if (TREE_CODE (value) != INTEGER_CST)
15851 div = build_int_cst (TREE_TYPE (value), divisor);
15853 if (multiple_of_p (TREE_TYPE (value), value, div))
15854 return value;
15857 /* If divisor is a power of two, simplify this to bit manipulation. */
15858 if (divisor == (divisor & -divisor))
15860 if (TREE_CODE (value) == INTEGER_CST)
15862 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15863 unsigned HOST_WIDE_INT high;
15864 bool overflow_p;
15866 if ((low & (divisor - 1)) == 0)
15867 return value;
15869 overflow_p = TREE_OVERFLOW (value);
15870 high = TREE_INT_CST_HIGH (value);
15871 low &= ~(divisor - 1);
15872 low += divisor;
15873 if (low == 0)
15875 high++;
15876 if (high == 0)
15877 overflow_p = true;
15880 return force_fit_type_double (TREE_TYPE (value), low, high,
15881 -1, overflow_p);
15883 else
15885 tree t;
15887 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15888 value = size_binop (PLUS_EXPR, value, t);
15889 t = build_int_cst (TREE_TYPE (value), -divisor);
15890 value = size_binop (BIT_AND_EXPR, value, t);
15893 else
15895 if (!div)
15896 div = build_int_cst (TREE_TYPE (value), divisor);
15897 value = size_binop (CEIL_DIV_EXPR, value, div);
15898 value = size_binop (MULT_EXPR, value, div);
15901 return value;
15904 /* Likewise, but round down. */
15906 tree
15907 round_down (tree value, int divisor)
15909 tree div = NULL_TREE;
15911 gcc_assert (divisor > 0);
15912 if (divisor == 1)
15913 return value;
15915 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15916 have to do anything. Only do this when we are not given a const,
15917 because in that case, this check is more expensive than just
15918 doing it. */
15919 if (TREE_CODE (value) != INTEGER_CST)
15921 div = build_int_cst (TREE_TYPE (value), divisor);
15923 if (multiple_of_p (TREE_TYPE (value), value, div))
15924 return value;
15927 /* If divisor is a power of two, simplify this to bit manipulation. */
15928 if (divisor == (divisor & -divisor))
15930 tree t;
15932 t = build_int_cst (TREE_TYPE (value), -divisor);
15933 value = size_binop (BIT_AND_EXPR, value, t);
15935 else
15937 if (!div)
15938 div = build_int_cst (TREE_TYPE (value), divisor);
15939 value = size_binop (FLOOR_DIV_EXPR, value, div);
15940 value = size_binop (MULT_EXPR, value, div);
15943 return value;
15946 /* Returns the pointer to the base of the object addressed by EXP and
15947 extracts the information about the offset of the access, storing it
15948 to PBITPOS and POFFSET. */
15950 static tree
15951 split_address_to_core_and_offset (tree exp,
15952 HOST_WIDE_INT *pbitpos, tree *poffset)
15954 tree core;
15955 enum machine_mode mode;
15956 int unsignedp, volatilep;
15957 HOST_WIDE_INT bitsize;
15959 if (TREE_CODE (exp) == ADDR_EXPR)
15961 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15962 poffset, &mode, &unsignedp, &volatilep,
15963 false);
15964 core = build_fold_addr_expr (core);
15966 else
15968 core = exp;
15969 *pbitpos = 0;
15970 *poffset = NULL_TREE;
15973 return core;
15976 /* Returns true if addresses of E1 and E2 differ by a constant, false
15977 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15979 bool
15980 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15982 tree core1, core2;
15983 HOST_WIDE_INT bitpos1, bitpos2;
15984 tree toffset1, toffset2, tdiff, type;
15986 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15987 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15989 if (bitpos1 % BITS_PER_UNIT != 0
15990 || bitpos2 % BITS_PER_UNIT != 0
15991 || !operand_equal_p (core1, core2, 0))
15992 return false;
15994 if (toffset1 && toffset2)
15996 type = TREE_TYPE (toffset1);
15997 if (type != TREE_TYPE (toffset2))
15998 toffset2 = fold_convert (type, toffset2);
16000 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16001 if (!cst_and_fits_in_hwi (tdiff))
16002 return false;
16004 *diff = int_cst_value (tdiff);
16006 else if (toffset1 || toffset2)
16008 /* If only one of the offsets is non-constant, the difference cannot
16009 be a constant. */
16010 return false;
16012 else
16013 *diff = 0;
16015 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16016 return true;
16019 /* Simplify the floating point expression EXP when the sign of the
16020 result is not significant. Return NULL_TREE if no simplification
16021 is possible. */
16023 tree
16024 fold_strip_sign_ops (tree exp)
16026 tree arg0, arg1;
16028 switch (TREE_CODE (exp))
16030 case ABS_EXPR:
16031 case NEGATE_EXPR:
16032 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16033 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16035 case MULT_EXPR:
16036 case RDIV_EXPR:
16037 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16038 return NULL_TREE;
16039 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16040 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16041 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16042 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
16043 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16044 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16045 break;
16047 case COMPOUND_EXPR:
16048 arg0 = TREE_OPERAND (exp, 0);
16049 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16050 if (arg1)
16051 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16052 break;
16054 case COND_EXPR:
16055 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16056 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16057 if (arg0 || arg1)
16058 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16059 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16060 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16061 break;
16063 case CALL_EXPR:
16065 const enum built_in_function fcode = builtin_mathfn_code (exp);
16066 switch (fcode)
16068 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16069 /* Strip copysign function call, return the 1st argument. */
16070 arg0 = CALL_EXPR_ARG (exp, 0);
16071 arg1 = CALL_EXPR_ARG (exp, 1);
16072 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
16074 default:
16075 /* Strip sign ops from the argument of "odd" math functions. */
16076 if (negate_mathfn_p (fcode))
16078 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16079 if (arg0)
16080 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
16082 break;
16085 break;
16087 default:
16088 break;
16090 return NULL_TREE;