/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

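/* Worked illustration (not part of the algorithm itself): with 8-bit
   two's complement values, 100 + 100 wraps to -56.  The operands agree
   in sign, so ~(a ^ b) has the sign bit set, and a ^ sum also has it
   set because operand and result disagree, making the whole expression
   negative, i.e. overflow is reported.  For 100 + (-100) = 0 the
   operands already differ in sign, ~(a ^ b) has a clear sign bit, and
   no overflow is reported.  */
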
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

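/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32 (so BASE ==
   0x10000): LOWPART (0x12345678) == 0x5678 and HIGHPART (0x12345678)
   == 0x1234, and indeed 0x5678 + 0x1234 * 0x10000 == 0x12345678.  */
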
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

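/* A minimal round-trip sketch (illustrative values only):

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;
     encode (words, 0x12345678, 0x1);
     decode (words, &lo, &hi);   // lo == 0x12345678, hi == 0x1 again

   decode is the exact inverse of encode because each word holds only
   HOST_BITS_PER_WIDE_INT / 2 bits, so no information is lost.  */
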
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec = int_or_pointer_precision (type);
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}

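/* For example (illustrative): truncating the value 300 (l1 == 300,
   h1 == 0) to a signed 8-bit type clears all bits above bit 7 leaving
   44, then sign-extends bit 7 (clear here), so *LV == 44, *HV == 0 and
   the function returns nonzero because 44 != 300.  */
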
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

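/* Note the carry out of the low word is computed as (l < l1): with
   unsigned wrap-around, l1 + l2 is less than l1 exactly when the low
   addition wrapped.  E.g. with 32-bit words (a worked illustration),
   0xFFFFFFFF + 2 gives l == 1 < l1, so 1 is carried into the high
   word.  */
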
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

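/* Illustrative observation about the code above: the only overflowing
   case is negating the most negative value, where l1 == 0 and h1 is
   the minimum HOST_WIDE_INT.  Then -h1 == h1, both copies have the
   sign bit set, and (*hv & h1) < 0 reports the overflow.  */
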
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

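/* Both rotates reduce to the standard shift identity
   rot(x, n) == (x << n) | (x >> (prec - n)) on the double-word value,
   with both shifts done in logical (ARITH == 0) mode.  As a worked
   example with an 8-bit precision (illustrative only), rotating 0xB1
   left by 4 gives (0x10 | 0x0B) == 0x1B.  */
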
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

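/* Worked example of the rounding modes (illustrative): for -7 / 2 the
   truncating quotient is -3 with trial remainder -1.  FLOOR_DIV_EXPR
   adjusts the quotient to -4 (true remainder 1), CEIL_DIV_EXPR keeps
   -3, and ROUND_DIV_EXPR compares 2 * |rem| == 2 with |den| == 2 and,
   since they are equal, rounds away from zero to -4.  */
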
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

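/* A typical (hypothetical) caller pairs the defer/undefer entry points
   around speculative folding, along these lines:

     fold_defer_overflow_warnings ();
     t = fold (expr);
     fold_undefer_overflow_warnings (result_is_used, stmt, 0);

   so that a strict-overflow warning is only emitted when the folded
   result is actually kept.  */
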
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

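/* For a 32-bit signed type, for instance (illustrative), the only
   value whose negation overflows is INT_MIN, whose bit pattern is
   1 << 31; the function returns false exactly when the masked low
   bits equal that single bit pattern.  */
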
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

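/* For instance (an illustrative decomposition): splitting the tree for
   x + 5 with CODE == PLUS_EXPR yields *LITP == 5, *CONP == NULL and
   returns x; splitting x - 5 instead sets *MINUS_LITP == 5.  An
   operand that is TREE_CONSTANT but not a literal, such as &a, would
   come back in *CONP.  */
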
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

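/* So, for example (a worked illustration of the flow above), folding
   PLUS_EXPR on INTEGER_CSTs 2 and 3 of type int computes low == 5 via
   add_double, sees no overflow, and with NOTRUNC == 0 returns the
   shared node built by force_fit_type_double for 5.  */
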
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

1951 if (TREE_CODE (arg1) == COMPLEX_CST)
1953 tree type = TREE_TYPE (arg1);
1954 tree r1 = TREE_REALPART (arg1);
1955 tree i1 = TREE_IMAGPART (arg1);
1956 tree r2 = TREE_REALPART (arg2);
1957 tree i2 = TREE_IMAGPART (arg2);
1958 tree real, imag;
1960 switch (code)
1962 case PLUS_EXPR:
1963 case MINUS_EXPR:
1964 real = const_binop (code, r1, r2, notrunc);
1965 imag = const_binop (code, i1, i2, notrunc);
1966 break;
1968 case MULT_EXPR:
1969 #ifdef HAVE_mpc
1970 if (COMPLEX_FLOAT_TYPE_P (type))
1971 return do_mpc_arg2 (arg1, arg2, type,
1972 /* do_nonfinite= */ folding_initializer,
1973 mpc_mul);
1974 #endif
1976 real = const_binop (MINUS_EXPR,
1977 const_binop (MULT_EXPR, r1, r2, notrunc),
1978 const_binop (MULT_EXPR, i1, i2, notrunc),
1979 notrunc);
1980 imag = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, i2, notrunc),
1982 const_binop (MULT_EXPR, i1, r2, notrunc),
1983 notrunc);
1984 break;
1986 case RDIV_EXPR:
1987 #ifdef HAVE_mpc
1988 if (COMPLEX_FLOAT_TYPE_P (type))
1989 return do_mpc_arg2 (arg1, arg2, type,
1990 /* do_nonfinite= */ folding_initializer,
1991 mpc_div);
1992 /* Fallthru ... */
1993 #endif
1995 case TRUNC_DIV_EXPR:
1996 case CEIL_DIV_EXPR:
1997 case FLOOR_DIV_EXPR:
1998 case ROUND_DIV_EXPR:
1999 if (flag_complex_method == 0)
2001 /* Keep this algorithm in sync with
2002 tree-complex.c:expand_complex_div_straight().
2004 Expand complex division to scalars, straightforward algorithm.
2005 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
2006 t = br*br + bi*bi
2007 */
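/* For illustration (not part of the original source): a worked instance
   of the formula above, in exact arithmetic.  For a = 4 + 2i and
   b = 1 + 1i:
     t    = 1*1 + 1*1        = 2
     real = (4*1 + 2*1) / 2  = 3
     imag = (2*1 - 4*1) / 2  = -1
   so a / b = 3 - 1i, which checks out: (3 - 1i) * (1 + 1i) = 4 + 2i.  */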
2008 tree magsquared
2009 = const_binop (PLUS_EXPR,
2010 const_binop (MULT_EXPR, r2, r2, notrunc),
2011 const_binop (MULT_EXPR, i2, i2, notrunc),
2012 notrunc);
2013 tree t1
2014 = const_binop (PLUS_EXPR,
2015 const_binop (MULT_EXPR, r1, r2, notrunc),
2016 const_binop (MULT_EXPR, i1, i2, notrunc),
2017 notrunc);
2018 tree t2
2019 = const_binop (MINUS_EXPR,
2020 const_binop (MULT_EXPR, i1, r2, notrunc),
2021 const_binop (MULT_EXPR, r1, i2, notrunc),
2022 notrunc);
2024 real = const_binop (code, t1, magsquared, notrunc);
2025 imag = const_binop (code, t2, magsquared, notrunc);
2027 else
2029 /* Keep this algorithm in sync with
2030 tree-complex.c:expand_complex_div_wide().
2032 Expand complex division to scalars, modified algorithm to minimize
2033 overflow with wide input ranges. */
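/* For illustration (not part of the original source): this scaling is
   commonly attributed to Smith's algorithm.  Dividing by the
   larger-magnitude component first means no intermediate squares a big
   value: for b = 1e300 + 1e300i, the straight method's
   t = br*br + bi*bi overflows double, while here ratio = 1.0 and
   div = 2e300 both stay finite.  */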
2034 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
2035 fold_abs_const (r2, TREE_TYPE (type)),
2036 fold_abs_const (i2, TREE_TYPE (type)));
2038 if (integer_nonzerop (compare))
2040 /* In the TRUE branch, we compute
2041 ratio = br/bi;
2042 div = (br * ratio) + bi;
2043 tr = (ar * ratio) + ai;
2044 ti = (ai * ratio) - ar;
2045 tr = tr / div;
2046 ti = ti / div; */
2047 tree ratio = const_binop (code, r2, i2, notrunc);
2048 tree div = const_binop (PLUS_EXPR, i2,
2049 const_binop (MULT_EXPR, r2, ratio,
2050 notrunc),
2051 notrunc);
2052 real = const_binop (MULT_EXPR, r1, ratio, notrunc);
2053 real = const_binop (PLUS_EXPR, real, i1, notrunc);
2054 real = const_binop (code, real, div, notrunc);
2056 imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
2057 imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
2058 imag = const_binop (code, imag, div, notrunc);
2060 else
2062 /* In the FALSE branch, we compute
2063 ratio = bi/br;
2064 div = (bi * ratio) + br;
2065 tr = (ai * ratio) + ar;
2066 ti = ai - (ar * ratio);
2067 tr = tr / div;
2068 ti = ti / div; */
2069 tree ratio = const_binop (code, i2, r2, notrunc);
2070 tree div = const_binop (PLUS_EXPR, r2,
2071 const_binop (MULT_EXPR, i2, ratio,
2072 notrunc),
2073 notrunc);
2075 real = const_binop (MULT_EXPR, i1, ratio, notrunc);
2076 real = const_binop (PLUS_EXPR, real, r1, notrunc);
2077 real = const_binop (code, real, div, notrunc);
2079 imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
2080 imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
2081 imag = const_binop (code, imag, div, notrunc);
2084 break;
2086 default:
2087 return NULL_TREE;
2090 if (real && imag)
2091 return build_complex (type, real, imag);
2094 if (TREE_CODE (arg1) == VECTOR_CST)
2096 tree type = TREE_TYPE (arg1);
2097 int count = TYPE_VECTOR_SUBPARTS (type), i;
2098 tree elements1, elements2, list = NULL_TREE;
2100 if (TREE_CODE (arg2) != VECTOR_CST)
2101 return NULL_TREE;
2103 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2104 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2106 for (i = 0; i < count; i++)
2108 tree elem1, elem2, elem;
2110 /* The trailing elements can be empty and should be treated as 0. */
2111 if (!elements1)
2112 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2113 else
2115 elem1 = TREE_VALUE (elements1);
2116 elements1 = TREE_CHAIN (elements1);
2119 if (!elements2)
2120 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2121 else
2123 elem2 = TREE_VALUE (elements2);
2124 elements2 = TREE_CHAIN (elements2);
2127 elem = const_binop (code, elem1, elem2, notrunc);
2129 /* It is possible that const_binop cannot handle the given
2130 code and returns NULL_TREE. */
2131 if (elem == NULL_TREE)
2132 return NULL_TREE;
2134 list = tree_cons (NULL_TREE, elem, list);
2136 return build_vector (type, nreverse (list));
2138 return NULL_TREE;
2141 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2142 indicates which particular sizetype to create. */
2144 tree
2145 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2147 return build_int_cst (sizetype_tab[(int) kind], number);
2150 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2151 is a tree code. The type of the result is taken from the operands.
2152 Both must be equivalent integer types, in the sense of int_binop_types_match_p.
2153 If the operands are constant, so is the result. */
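/* For illustration (not part of the original source, and assuming the
   usual size_binop wrapper that passes UNKNOWN_LOCATION to
   size_binop_loc):

     size_binop (PLUS_EXPR, size_int (4), size_int (8))

   yields a sizetype INTEGER_CST of value 12, while
   size_binop (MULT_EXPR, size_int (1), size_int (8)) returns its second
   argument unchanged via the fast path below.  */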
2155 tree
2156 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2158 tree type = TREE_TYPE (arg0);
2160 if (arg0 == error_mark_node || arg1 == error_mark_node)
2161 return error_mark_node;
2163 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2164 TREE_TYPE (arg1)));
2166 /* Handle the special case of two integer constants faster. */
2167 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2169 /* And some specific cases even faster than that. */
2170 if (code == PLUS_EXPR)
2172 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2173 return arg1;
2174 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2175 return arg0;
2177 else if (code == MINUS_EXPR)
2179 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2180 return arg0;
2182 else if (code == MULT_EXPR)
2184 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2185 return arg1;
2188 /* Handle general case of two integer constants. */
2189 return int_const_binop (code, arg0, arg1, 0);
2192 return fold_build2_loc (loc, code, type, arg0, arg1);
2195 /* Given two values, either both of sizetype or both of bitsizetype,
2196 compute the difference between the two values. Return the value
2197 in signed type corresponding to the type of the operands. */
2199 tree
2200 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2202 tree type = TREE_TYPE (arg0);
2203 tree ctype;
2205 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2206 TREE_TYPE (arg1)));
2208 /* If the type is already signed, just do the simple thing. */
2209 if (!TYPE_UNSIGNED (type))
2210 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2212 if (type == sizetype)
2213 ctype = ssizetype;
2214 else if (type == bitsizetype)
2215 ctype = sbitsizetype;
2216 else
2217 ctype = signed_type_for (type);
2219 /* If either operand is not a constant, do the conversions to the signed
2220 type and subtract. The hardware will do the right thing with any
2221 overflow in the subtraction. */
2222 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2223 return size_binop_loc (loc, MINUS_EXPR,
2224 fold_convert_loc (loc, ctype, arg0),
2225 fold_convert_loc (loc, ctype, arg1));
2227 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2228 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2229 overflow) and negate (which can't either). Special-case a result
2230 of zero while we're here. */
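/* For illustration (not part of the original source): with sizetype
   operands ARG0 = 2 and ARG1 = 5, the unsigned subtraction 2 - 5 would
   wrap, so the code below instead computes 5 - 2 = 3 in sizetype,
   converts that to ssizetype, and negates it, giving -3.  */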
2231 if (tree_int_cst_equal (arg0, arg1))
2232 return build_int_cst (ctype, 0);
2233 else if (tree_int_cst_lt (arg1, arg0))
2234 return fold_convert_loc (loc, ctype,
2235 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2236 else
2237 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2238 fold_convert_loc (loc, ctype,
2239 size_binop_loc (loc,
2240 MINUS_EXPR,
2241 arg1, arg0)));
2244 /* A subroutine of fold_convert_const handling conversions of an
2245 INTEGER_CST to another integer type. */
2247 static tree
2248 fold_convert_const_int_from_int (tree type, const_tree arg1)
2250 tree t;
2252 /* Given an integer constant, make new constant with new type,
2253 appropriately sign-extended or truncated. */
2254 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2255 TREE_INT_CST_HIGH (arg1),
2256 /* Don't set the overflow when
2257 converting from a pointer, */
2258 !POINTER_TYPE_P (TREE_TYPE (arg1))
2259 /* or to a sizetype with the same signedness
2260 and unchanged precision.
2261 ??? sizetype is always sign-extended,
2262 but its signedness depends on the
2263 frontend. Thus we see spurious overflows
2264 here if we do not check this. */
2265 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2266 == TYPE_PRECISION (type))
2267 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2268 == TYPE_UNSIGNED (type))
2269 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2270 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2271 || (TREE_CODE (type) == INTEGER_TYPE
2272 && TYPE_IS_SIZETYPE (type)))),
2273 (TREE_INT_CST_HIGH (arg1) < 0
2274 && (TYPE_UNSIGNED (type)
2275 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2276 | TREE_OVERFLOW (arg1));
2278 return t;
2281 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2282 to an integer type. */
2284 static tree
2285 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2287 int overflow = 0;
2288 tree t;
2290 /* The following code implements the floating point to integer
2291 conversion rules required by the Java Language Specification,
2292 that IEEE NaNs are mapped to zero and values that overflow
2293 the target precision saturate, i.e. values greater than
2294 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2295 are mapped to INT_MIN. These semantics are allowed by the
2296 C and C++ standards that simply state that the behavior of
2297 FP-to-integer conversion is unspecified upon overflow. */
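/* For illustration (not part of the original source): the same
   saturating semantics in plain C for double -> int; `sat_trunc' is a
   made-up name.

     #include <limits.h>
     #include <math.h>

     static int sat_trunc (double x)
     {
       if (isnan (x))
         return 0;                  // NaN maps to zero
       x = trunc (x);               // FIX_TRUNC_EXPR rounds toward zero
       if (x < (double) INT_MIN)    // saturate below ...
         return INT_MIN;
       if (x > (double) INT_MAX)    // ... and above
         return INT_MAX;
       return (int) x;
     }

   E.g. sat_trunc (3.9) == 3, sat_trunc (-1e30) == INT_MIN, and
   sat_trunc (NAN) == 0; the code below additionally sets TREE_OVERFLOW
   in the NaN and saturating cases.  */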
2299 HOST_WIDE_INT high, low;
2300 REAL_VALUE_TYPE r;
2301 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2303 switch (code)
2305 case FIX_TRUNC_EXPR:
2306 real_trunc (&r, VOIDmode, &x);
2307 break;
2309 default:
2310 gcc_unreachable ();
2313 /* If R is NaN, return zero and show we have an overflow. */
2314 if (REAL_VALUE_ISNAN (r))
2316 overflow = 1;
2317 high = 0;
2318 low = 0;
2321 /* See if R is less than the lower bound or greater than the
2322 upper bound. */
2324 if (! overflow)
2326 tree lt = TYPE_MIN_VALUE (type);
2327 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2328 if (REAL_VALUES_LESS (r, l))
2330 overflow = 1;
2331 high = TREE_INT_CST_HIGH (lt);
2332 low = TREE_INT_CST_LOW (lt);
2336 if (! overflow)
2338 tree ut = TYPE_MAX_VALUE (type);
2339 if (ut)
2341 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2342 if (REAL_VALUES_LESS (u, r))
2344 overflow = 1;
2345 high = TREE_INT_CST_HIGH (ut);
2346 low = TREE_INT_CST_LOW (ut);
2351 if (! overflow)
2352 REAL_VALUE_TO_INT (&low, &high, r);
2354 t = force_fit_type_double (type, low, high, -1,
2355 overflow | TREE_OVERFLOW (arg1));
2356 return t;
2359 /* A subroutine of fold_convert_const handling conversions of a
2360 FIXED_CST to an integer type. */
2362 static tree
2363 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2365 tree t;
2366 double_int temp, temp_trunc;
2367 unsigned int mode;
2369 /* Right shift FIXED_CST to temp by fbit. */
2370 temp = TREE_FIXED_CST (arg1).data;
2371 mode = TREE_FIXED_CST (arg1).mode;
2372 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2374 lshift_double (temp.low, temp.high,
2375 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2376 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2378 /* Left shift temp to temp_trunc by fbit. */
2379 lshift_double (temp.low, temp.high,
2380 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2381 &temp_trunc.low, &temp_trunc.high,
2382 SIGNED_FIXED_POINT_MODE_P (mode));
2384 else
2386 temp.low = 0;
2387 temp.high = 0;
2388 temp_trunc.low = 0;
2389 temp_trunc.high = 0;
2392 /* If FIXED_CST is negative, we need to round the value toward 0; the shift
2393 above rounded toward negative infinity, so add 1 to TEMP if any fractional bits were nonzero. */
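/* For illustration (not part of the original source): with 2 fractional
   bits, the value -2.25 is stored as -9.  The arithmetic right shift by
   2 gives TEMP = -3 (rounded toward negative infinity); shifting back
   gives TEMP_TRUNC = -12 != -9, so 1 is added and TEMP becomes -2,
   i.e. -2.25 rounded toward zero.  */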
2394 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2395 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2397 double_int one;
2398 one.low = 1;
2399 one.high = 0;
2400 temp = double_int_add (temp, one);
2403 /* Given a fixed-point constant, make new constant with new type,
2404 appropriately sign-extended or truncated. */
2405 t = force_fit_type_double (type, temp.low, temp.high, -1,
2406 (temp.high < 0
2407 && (TYPE_UNSIGNED (type)
2408 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2409 | TREE_OVERFLOW (arg1));
2411 return t;
2414 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2415 to another floating point type. */
2417 static tree
2418 fold_convert_const_real_from_real (tree type, const_tree arg1)
2420 REAL_VALUE_TYPE value;
2421 tree t;
2423 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2424 t = build_real (type, value);
2426 /* If converting an infinity or NAN to a representation that doesn't
2427 have one, set the overflow bit so that we can produce some kind of
2428 error message at the appropriate point if necessary. It's not the
2429 most user-friendly message, but it's better than nothing. */
2430 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2431 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2432 TREE_OVERFLOW (t) = 1;
2433 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2434 && !MODE_HAS_NANS (TYPE_MODE (type)))
2435 TREE_OVERFLOW (t) = 1;
2436 /* Regular overflow, conversion produced an infinity in a mode that
2437 can't represent it. */
2438 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2439 && REAL_VALUE_ISINF (value)
2440 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2441 TREE_OVERFLOW (t) = 1;
2442 else
2443 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2444 return t;
2447 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2448 to a floating point type. */
2450 static tree
2451 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2453 REAL_VALUE_TYPE value;
2454 tree t;
2456 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2457 t = build_real (type, value);
2459 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2460 return t;
2463 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2464 to another fixed-point type. */
2466 static tree
2467 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2469 FIXED_VALUE_TYPE value;
2470 tree t;
2471 bool overflow_p;
2473 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2474 TYPE_SATURATING (type));
2475 t = build_fixed (type, value);
2477 /* Propagate overflow flags. */
2478 if (overflow_p | TREE_OVERFLOW (arg1))
2479 TREE_OVERFLOW (t) = 1;
2480 return t;
2483 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2484 to a fixed-point type. */
2486 static tree
2487 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2489 FIXED_VALUE_TYPE value;
2490 tree t;
2491 bool overflow_p;
2493 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2494 TREE_INT_CST (arg1),
2495 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2496 TYPE_SATURATING (type));
2497 t = build_fixed (type, value);
2499 /* Propagate overflow flags. */
2500 if (overflow_p | TREE_OVERFLOW (arg1))
2501 TREE_OVERFLOW (t) = 1;
2502 return t;
2505 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2506 to a fixed-point type. */
2508 static tree
2509 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2511 FIXED_VALUE_TYPE value;
2512 tree t;
2513 bool overflow_p;
2515 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2516 &TREE_REAL_CST (arg1),
2517 TYPE_SATURATING (type));
2518 t = build_fixed (type, value);
2520 /* Propagate overflow flags. */
2521 if (overflow_p | TREE_OVERFLOW (arg1))
2522 TREE_OVERFLOW (t) = 1;
2523 return t;
2526 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2527 type TYPE. If no simplification can be done return NULL_TREE. */
2529 static tree
2530 fold_convert_const (enum tree_code code, tree type, tree arg1)
2532 if (TREE_TYPE (arg1) == type)
2533 return arg1;
2535 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2536 || TREE_CODE (type) == OFFSET_TYPE)
2538 if (TREE_CODE (arg1) == INTEGER_CST)
2539 return fold_convert_const_int_from_int (type, arg1);
2540 else if (TREE_CODE (arg1) == REAL_CST)
2541 return fold_convert_const_int_from_real (code, type, arg1);
2542 else if (TREE_CODE (arg1) == FIXED_CST)
2543 return fold_convert_const_int_from_fixed (type, arg1);
2545 else if (TREE_CODE (type) == REAL_TYPE)
2547 if (TREE_CODE (arg1) == INTEGER_CST)
2548 return build_real_from_int_cst (type, arg1);
2549 else if (TREE_CODE (arg1) == REAL_CST)
2550 return fold_convert_const_real_from_real (type, arg1);
2551 else if (TREE_CODE (arg1) == FIXED_CST)
2552 return fold_convert_const_real_from_fixed (type, arg1);
2554 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2556 if (TREE_CODE (arg1) == FIXED_CST)
2557 return fold_convert_const_fixed_from_fixed (type, arg1);
2558 else if (TREE_CODE (arg1) == INTEGER_CST)
2559 return fold_convert_const_fixed_from_int (type, arg1);
2560 else if (TREE_CODE (arg1) == REAL_CST)
2561 return fold_convert_const_fixed_from_real (type, arg1);
2563 return NULL_TREE;
2566 /* Construct a vector of zero elements of vector type TYPE. */
2568 static tree
2569 build_zero_vector (tree type)
2571 tree elem, list;
2572 int i, units;
2574 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2575 units = TYPE_VECTOR_SUBPARTS (type);
2577 list = NULL_TREE;
2578 for (i = 0; i < units; i++)
2579 list = tree_cons (NULL_TREE, elem, list);
2580 return build_vector (type, list);
2583 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2585 bool
2586 fold_convertible_p (const_tree type, const_tree arg)
2588 tree orig = TREE_TYPE (arg);
2590 if (type == orig)
2591 return true;
2593 if (TREE_CODE (arg) == ERROR_MARK
2594 || TREE_CODE (type) == ERROR_MARK
2595 || TREE_CODE (orig) == ERROR_MARK)
2596 return false;
2598 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2599 return true;
2601 switch (TREE_CODE (type))
2603 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2604 case POINTER_TYPE: case REFERENCE_TYPE:
2605 case OFFSET_TYPE:
2606 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2607 || TREE_CODE (orig) == OFFSET_TYPE)
2608 return true;
2609 return (TREE_CODE (orig) == VECTOR_TYPE
2610 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2612 case REAL_TYPE:
2613 case FIXED_POINT_TYPE:
2614 case COMPLEX_TYPE:
2615 case VECTOR_TYPE:
2616 case VOID_TYPE:
2617 return TREE_CODE (type) == TREE_CODE (orig);
2619 default:
2620 return false;
2624 /* Convert expression ARG to type TYPE. Used by the middle-end for
2625 simple conversions in preference to calling the front-end's convert. */
2627 tree
2628 fold_convert_loc (location_t loc, tree type, tree arg)
2630 tree orig = TREE_TYPE (arg);
2631 tree tem;
2633 if (type == orig)
2634 return arg;
2636 if (TREE_CODE (arg) == ERROR_MARK
2637 || TREE_CODE (type) == ERROR_MARK
2638 || TREE_CODE (orig) == ERROR_MARK)
2639 return error_mark_node;
2641 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2642 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2644 switch (TREE_CODE (type))
2646 case POINTER_TYPE:
2647 case REFERENCE_TYPE:
2648 /* Handle conversions between pointers to different address spaces. */
2649 if (POINTER_TYPE_P (orig)
2650 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2651 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2652 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2653 /* fall through */
2655 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2656 case OFFSET_TYPE:
2657 if (TREE_CODE (arg) == INTEGER_CST)
2659 tem = fold_convert_const (NOP_EXPR, type, arg);
2660 if (tem != NULL_TREE)
2661 return tem;
2663 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2664 || TREE_CODE (orig) == OFFSET_TYPE)
2665 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2666 if (TREE_CODE (orig) == COMPLEX_TYPE)
2667 return fold_convert_loc (loc, type,
2668 fold_build1_loc (loc, REALPART_EXPR,
2669 TREE_TYPE (orig), arg));
2670 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2671 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2672 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2674 case REAL_TYPE:
2675 if (TREE_CODE (arg) == INTEGER_CST)
2677 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2678 if (tem != NULL_TREE)
2679 return tem;
2681 else if (TREE_CODE (arg) == REAL_CST)
2683 tem = fold_convert_const (NOP_EXPR, type, arg);
2684 if (tem != NULL_TREE)
2685 return tem;
2687 else if (TREE_CODE (arg) == FIXED_CST)
2689 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2690 if (tem != NULL_TREE)
2691 return tem;
2694 switch (TREE_CODE (orig))
2696 case INTEGER_TYPE:
2697 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2698 case POINTER_TYPE: case REFERENCE_TYPE:
2699 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2701 case REAL_TYPE:
2702 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2704 case FIXED_POINT_TYPE:
2705 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2707 case COMPLEX_TYPE:
2708 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2709 return fold_convert_loc (loc, type, tem);
2711 default:
2712 gcc_unreachable ();
2715 case FIXED_POINT_TYPE:
2716 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2717 || TREE_CODE (arg) == REAL_CST)
2719 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2720 if (tem != NULL_TREE)
2721 goto fold_convert_exit;
2724 switch (TREE_CODE (orig))
2726 case FIXED_POINT_TYPE:
2727 case INTEGER_TYPE:
2728 case ENUMERAL_TYPE:
2729 case BOOLEAN_TYPE:
2730 case REAL_TYPE:
2731 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2733 case COMPLEX_TYPE:
2734 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2735 return fold_convert_loc (loc, type, tem);
2737 default:
2738 gcc_unreachable ();
2741 case COMPLEX_TYPE:
2742 switch (TREE_CODE (orig))
2744 case INTEGER_TYPE:
2745 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2746 case POINTER_TYPE: case REFERENCE_TYPE:
2747 case REAL_TYPE:
2748 case FIXED_POINT_TYPE:
2749 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2750 fold_convert_loc (loc, TREE_TYPE (type), arg),
2751 fold_convert_loc (loc, TREE_TYPE (type),
2752 integer_zero_node));
2753 case COMPLEX_TYPE:
2755 tree rpart, ipart;
2757 if (TREE_CODE (arg) == COMPLEX_EXPR)
2759 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2760 TREE_OPERAND (arg, 0));
2761 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2762 TREE_OPERAND (arg, 1));
2763 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2766 arg = save_expr (arg);
2767 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2768 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2769 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2770 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2771 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2774 default:
2775 gcc_unreachable ();
2778 case VECTOR_TYPE:
2779 if (integer_zerop (arg))
2780 return build_zero_vector (type);
2781 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2782 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2783 || TREE_CODE (orig) == VECTOR_TYPE);
2784 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2786 case VOID_TYPE:
2787 tem = fold_ignored_result (arg);
2788 if (TREE_CODE (tem) == MODIFY_EXPR)
2789 goto fold_convert_exit;
2790 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2792 default:
2793 gcc_unreachable ();
2795 fold_convert_exit:
2796 protected_set_expr_location (tem, loc);
2797 return tem;
2800 /* Return false if expr can be assumed not to be an lvalue, true
2801 otherwise. */
2803 static bool
2804 maybe_lvalue_p (const_tree x)
2806 /* We only need to wrap lvalue tree codes. */
2807 switch (TREE_CODE (x))
2809 case VAR_DECL:
2810 case PARM_DECL:
2811 case RESULT_DECL:
2812 case LABEL_DECL:
2813 case FUNCTION_DECL:
2814 case SSA_NAME:
2816 case COMPONENT_REF:
2817 case INDIRECT_REF:
2818 case ALIGN_INDIRECT_REF:
2819 case MISALIGNED_INDIRECT_REF:
2820 case ARRAY_REF:
2821 case ARRAY_RANGE_REF:
2822 case BIT_FIELD_REF:
2823 case OBJ_TYPE_REF:
2825 case REALPART_EXPR:
2826 case IMAGPART_EXPR:
2827 case PREINCREMENT_EXPR:
2828 case PREDECREMENT_EXPR:
2829 case SAVE_EXPR:
2830 case TRY_CATCH_EXPR:
2831 case WITH_CLEANUP_EXPR:
2832 case COMPOUND_EXPR:
2833 case MODIFY_EXPR:
2834 case TARGET_EXPR:
2835 case COND_EXPR:
2836 case BIND_EXPR:
2837 break;
2839 default:
2840 /* Assume the worst for front-end tree codes. */
2841 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2842 break;
2843 return false;
2846 return true;
2849 /* Return an expr equal to X but certainly not valid as an lvalue. */
2851 tree
2852 non_lvalue_loc (location_t loc, tree x)
2854 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2855 us. */
2856 if (in_gimple_form)
2857 return x;
2859 if (! maybe_lvalue_p (x))
2860 return x;
2861 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2862 SET_EXPR_LOCATION (x, loc);
2863 return x;
2866 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2867 Zero means allow extended lvalues. */
2869 int pedantic_lvalues;
2871 /* When pedantic, return an expr equal to X but certainly not valid as a
2872 pedantic lvalue. Otherwise, return X. */
2874 static tree
2875 pedantic_non_lvalue_loc (location_t loc, tree x)
2877 if (pedantic_lvalues)
2878 return non_lvalue_loc (loc, x);
2879 protected_set_expr_location (x, loc);
2880 return x;
2883 /* Given a tree comparison code, return the code that is the logical inverse
2884 of the given code. It is not safe to do this for floating-point
2885 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2886 as a flag: if reversing the comparison is unsafe, return ERROR_MARK. */
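/* For illustration (not part of the original source): without NaNs,
   !(x < y) is x >= y.  When NaNs are honored, both x < y and x >= y are
   false for unordered operands, so the inverse of LT must be the
   unordered-or-greater-equal UNGE below; and if trapping math is also
   enabled, UNGE would not raise the invalid exception that LT does,
   hence the early ERROR_MARK.  */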
2888 enum tree_code
2889 invert_tree_comparison (enum tree_code code, bool honor_nans)
2891 if (honor_nans && flag_trapping_math)
2892 return ERROR_MARK;
2894 switch (code)
2896 case EQ_EXPR:
2897 return NE_EXPR;
2898 case NE_EXPR:
2899 return EQ_EXPR;
2900 case GT_EXPR:
2901 return honor_nans ? UNLE_EXPR : LE_EXPR;
2902 case GE_EXPR:
2903 return honor_nans ? UNLT_EXPR : LT_EXPR;
2904 case LT_EXPR:
2905 return honor_nans ? UNGE_EXPR : GE_EXPR;
2906 case LE_EXPR:
2907 return honor_nans ? UNGT_EXPR : GT_EXPR;
2908 case LTGT_EXPR:
2909 return UNEQ_EXPR;
2910 case UNEQ_EXPR:
2911 return LTGT_EXPR;
2912 case UNGT_EXPR:
2913 return LE_EXPR;
2914 case UNGE_EXPR:
2915 return LT_EXPR;
2916 case UNLT_EXPR:
2917 return GE_EXPR;
2918 case UNLE_EXPR:
2919 return GT_EXPR;
2920 case ORDERED_EXPR:
2921 return UNORDERED_EXPR;
2922 case UNORDERED_EXPR:
2923 return ORDERED_EXPR;
2924 default:
2925 gcc_unreachable ();
2929 /* Similar, but return the comparison that results if the operands are
2930 swapped. This is safe for floating-point. */
2932 enum tree_code
2933 swap_tree_comparison (enum tree_code code)
2935 switch (code)
2937 case EQ_EXPR:
2938 case NE_EXPR:
2939 case ORDERED_EXPR:
2940 case UNORDERED_EXPR:
2941 case LTGT_EXPR:
2942 case UNEQ_EXPR:
2943 return code;
2944 case GT_EXPR:
2945 return LT_EXPR;
2946 case GE_EXPR:
2947 return LE_EXPR;
2948 case LT_EXPR:
2949 return GT_EXPR;
2950 case LE_EXPR:
2951 return GE_EXPR;
2952 case UNGT_EXPR:
2953 return UNLT_EXPR;
2954 case UNGE_EXPR:
2955 return UNLE_EXPR;
2956 case UNLT_EXPR:
2957 return UNGT_EXPR;
2958 case UNLE_EXPR:
2959 return UNGE_EXPR;
2960 default:
2961 gcc_unreachable ();
2966 /* Convert a comparison tree code from an enum tree_code representation
2967 into a compcode bit-based encoding. This function is the inverse of
2968 compcode_to_comparison. */
2970 static enum comparison_code
2971 comparison_to_compcode (enum tree_code code)
2973 switch (code)
2975 case LT_EXPR:
2976 return COMPCODE_LT;
2977 case EQ_EXPR:
2978 return COMPCODE_EQ;
2979 case LE_EXPR:
2980 return COMPCODE_LE;
2981 case GT_EXPR:
2982 return COMPCODE_GT;
2983 case NE_EXPR:
2984 return COMPCODE_NE;
2985 case GE_EXPR:
2986 return COMPCODE_GE;
2987 case ORDERED_EXPR:
2988 return COMPCODE_ORD;
2989 case UNORDERED_EXPR:
2990 return COMPCODE_UNORD;
2991 case UNLT_EXPR:
2992 return COMPCODE_UNLT;
2993 case UNEQ_EXPR:
2994 return COMPCODE_UNEQ;
2995 case UNLE_EXPR:
2996 return COMPCODE_UNLE;
2997 case UNGT_EXPR:
2998 return COMPCODE_UNGT;
2999 case LTGT_EXPR:
3000 return COMPCODE_LTGT;
3001 case UNGE_EXPR:
3002 return COMPCODE_UNGE;
3003 default:
3004 gcc_unreachable ();
3008 /* Convert a compcode bit-based encoding of a comparison operator back
3009 to GCC's enum tree_code representation. This function is the
3010 inverse of comparison_to_compcode. */
3012 static enum tree_code
3013 compcode_to_comparison (enum comparison_code code)
3015 switch (code)
3017 case COMPCODE_LT:
3018 return LT_EXPR;
3019 case COMPCODE_EQ:
3020 return EQ_EXPR;
3021 case COMPCODE_LE:
3022 return LE_EXPR;
3023 case COMPCODE_GT:
3024 return GT_EXPR;
3025 case COMPCODE_NE:
3026 return NE_EXPR;
3027 case COMPCODE_GE:
3028 return GE_EXPR;
3029 case COMPCODE_ORD:
3030 return ORDERED_EXPR;
3031 case COMPCODE_UNORD:
3032 return UNORDERED_EXPR;
3033 case COMPCODE_UNLT:
3034 return UNLT_EXPR;
3035 case COMPCODE_UNEQ:
3036 return UNEQ_EXPR;
3037 case COMPCODE_UNLE:
3038 return UNLE_EXPR;
3039 case COMPCODE_UNGT:
3040 return UNGT_EXPR;
3041 case COMPCODE_LTGT:
3042 return LTGT_EXPR;
3043 case COMPCODE_UNGE:
3044 return UNGE_EXPR;
3045 default:
3046 gcc_unreachable ();
3050 /* Return a tree for the comparison which is the combination of
3051 doing the AND or OR (depending on CODE) of the two operations LCODE
3052 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3053 the possibility of trapping if the mode has NaNs, and return NULL_TREE
3054 if this makes the transformation invalid. */
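/* For illustration (not part of the original source): since each
   comparison is encoded as a set of {LT, EQ, GT, UNORD} bits, ANDing or
   ORing the encodings combines the predicates.  For the same operands:
     (x < y) || (x == y)  ->  COMPCODE_LT | COMPCODE_EQ, i.e. COMPCODE_LE
                          ->  x <= y
     (x < y) && (x > y)   ->  COMPCODE_LT & COMPCODE_GT, i.e. COMPCODE_FALSE
                          ->  constant false.  */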
3056 tree
3057 combine_comparisons (location_t loc,
3058 enum tree_code code, enum tree_code lcode,
3059 enum tree_code rcode, tree truth_type,
3060 tree ll_arg, tree lr_arg)
3062 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
3063 enum comparison_code lcompcode = comparison_to_compcode (lcode);
3064 enum comparison_code rcompcode = comparison_to_compcode (rcode);
3065 int compcode;
3067 switch (code)
3069 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3070 compcode = lcompcode & rcompcode;
3071 break;
3073 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3074 compcode = lcompcode | rcompcode;
3075 break;
3077 default:
3078 return NULL_TREE;
3081 if (!honor_nans)
3083 /* Eliminate unordered comparisons, as well as LTGT and ORD
3084 which are not used unless the mode has NaNs. */
3085 compcode &= ~COMPCODE_UNORD;
3086 if (compcode == COMPCODE_LTGT)
3087 compcode = COMPCODE_NE;
3088 else if (compcode == COMPCODE_ORD)
3089 compcode = COMPCODE_TRUE;
3091 else if (flag_trapping_math)
3093 /* Check that the original operation and the optimized ones will trap
3094 under the same condition. */
3095 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3096 && (lcompcode != COMPCODE_EQ)
3097 && (lcompcode != COMPCODE_ORD);
3098 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3099 && (rcompcode != COMPCODE_EQ)
3100 && (rcompcode != COMPCODE_ORD);
3101 bool trap = (compcode & COMPCODE_UNORD) == 0
3102 && (compcode != COMPCODE_EQ)
3103 && (compcode != COMPCODE_ORD);
3105 /* In a short-circuited boolean expression the LHS might be
3106 such that the RHS, if evaluated, will never trap. For
3107 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3108 if neither x nor y is NaN. (This is a mixed blessing: for
3109 example, the expression above will never trap, hence
3110 optimizing it to x < y would be invalid). */
3111 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3112 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3113 rtrap = false;
3115 /* If the comparison was short-circuited, and only the RHS
3116 trapped, we may now generate a spurious trap. */
3117 if (rtrap && !ltrap
3118 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3119 return NULL_TREE;
3121 /* If we changed the conditions that cause a trap, we lose. */
3122 if ((ltrap || rtrap) != trap)
3123 return NULL_TREE;
3126 if (compcode == COMPCODE_TRUE)
3127 return constant_boolean_node (true, truth_type);
3128 else if (compcode == COMPCODE_FALSE)
3129 return constant_boolean_node (false, truth_type);
3130 else
3132 enum tree_code tcode;
3134 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3135 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3139 /* Return nonzero if two operands (typically of the same tree node)
3140 are necessarily equal. If either argument has side-effects this
3141 function returns zero. FLAGS modifies behavior as follows:
3143 If OEP_ONLY_CONST is set, only return nonzero for constants.
3144 This function tests whether the operands are indistinguishable;
3145 it does not test whether they are equal using C's == operation.
3146 The distinction is important for IEEE floating point, because
3147 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3148 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3150 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3151 even though it may hold multiple values during a function.
3152 This is because a GCC tree node guarantees that nothing else is
3153 executed between the evaluation of its "operands" (which may often
3154 be evaluated in arbitrary order). Hence if the operands themselves
3155 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3156 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3157 unset means assuming isochronic (or instantaneous) tree equivalence.
3158 Unless comparing arbitrary expression trees, such as from different
3159 statements, this flag can usually be left unset.
3161 If OEP_PURE_SAME is set, then pure functions with identical arguments
3162 are considered the same. It is used when the caller has other ways
3163 to ensure that global memory is unchanged in between. */
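/* For illustration (not part of the original source): for two distinct
   trees reading the same variables, say A + B and B + A with A and B
   integer VAR_DECLs, this returns 1 through the commutative tcc_binary
   case below; and two REAL_CSTs holding the same NaN bit pattern also
   compare equal here, even though NaN != NaN as a C expression.  */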
3165 int
3166 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3168 /* If either is ERROR_MARK, they aren't equal. */
3169 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3170 return 0;
3172 /* Check equality of integer constants before bailing out due to
3173 precision differences. */
3174 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3175 return tree_int_cst_equal (arg0, arg1);
3177 /* If both types don't have the same signedness, then we can't consider
3178 them equal. We must check this before the STRIP_NOPS calls
3179 because they may change the signedness of the arguments. As pointers
3180 strictly don't have a signedness, require either two pointers or
3181 two non-pointers as well. */
3182 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3183 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3184 return 0;
3186 /* We cannot consider pointers to different address space equal. */
3187 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
3188 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3189 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3190 return 0;
3192 /* If both types don't have the same precision, then it is not safe
3193 to strip NOPs. */
3194 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3195 return 0;
3197 STRIP_NOPS (arg0);
3198 STRIP_NOPS (arg1);
3200 /* In case both args are comparisons but with different comparison
3201 code, try to swap the comparison operands of one arg to produce
3202 a match and compare that variant. */
3203 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3204 && COMPARISON_CLASS_P (arg0)
3205 && COMPARISON_CLASS_P (arg1))
3207 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3209 if (TREE_CODE (arg0) == swap_code)
3210 return operand_equal_p (TREE_OPERAND (arg0, 0),
3211 TREE_OPERAND (arg1, 1), flags)
3212 && operand_equal_p (TREE_OPERAND (arg0, 1),
3213 TREE_OPERAND (arg1, 0), flags);
3216 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3217 /* This is needed for conversions and for COMPONENT_REF.
3218 Might as well play it safe and always test this. */
3219 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3220 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3221 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3222 return 0;
3224 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3225 We don't care about side effects in that case because the SAVE_EXPR
3226 takes care of that for us. In all other cases, two expressions are
3227 equal if they have no side effects. If we have two identical
3228 expressions with side effects that should be treated the same due
3229 to the only side effects being identical SAVE_EXPR's, that will
3230 be detected in the recursive calls below. */
3231 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3232 && (TREE_CODE (arg0) == SAVE_EXPR
3233 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3234 return 1;
3236 /* Next handle constant cases, those for which we can return 1 even
3237 if ONLY_CONST is set. */
3238 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3239 switch (TREE_CODE (arg0))
3241 case INTEGER_CST:
3242 return tree_int_cst_equal (arg0, arg1);
3244 case FIXED_CST:
3245 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3246 TREE_FIXED_CST (arg1));
3248 case REAL_CST:
3249 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3250 TREE_REAL_CST (arg1)))
3251 return 1;
3254 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3256 /* If we do not distinguish between signed and unsigned zero,
3257 consider them equal. */
3258 if (real_zerop (arg0) && real_zerop (arg1))
3259 return 1;
3261 return 0;
3263 case VECTOR_CST:
3265 tree v1, v2;
3267 v1 = TREE_VECTOR_CST_ELTS (arg0);
3268 v2 = TREE_VECTOR_CST_ELTS (arg1);
3269 while (v1 && v2)
3271 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3272 flags))
3273 return 0;
3274 v1 = TREE_CHAIN (v1);
3275 v2 = TREE_CHAIN (v2);
3278 return v1 == v2;
3281 case COMPLEX_CST:
3282 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3283 flags)
3284 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3285 flags));
3287 case STRING_CST:
3288 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3289 && ! memcmp (TREE_STRING_POINTER (arg0),
3290 TREE_STRING_POINTER (arg1),
3291 TREE_STRING_LENGTH (arg0)));
3293 case ADDR_EXPR:
3294 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3295 0);
3296 default:
3297 break;
3300 if (flags & OEP_ONLY_CONST)
3301 return 0;
3303 /* Define macros to test an operand from arg0 and arg1 for equality and a
3304 variant that allows null and views null as being different from any
3305 non-null value. In the latter case, if either is null, then both
3306 must be; otherwise, do the normal comparison. */
3307 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3308 TREE_OPERAND (arg1, N), flags)
3310 #define OP_SAME_WITH_NULL(N) \
3311 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3312 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3314 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3316 case tcc_unary:
3317 /* Two conversions are equal only if signedness and modes match. */
3318 switch (TREE_CODE (arg0))
3320 CASE_CONVERT:
3321 case FIX_TRUNC_EXPR:
3322 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3323 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3324 return 0;
3325 break;
3326 default:
3327 break;
3330 return OP_SAME (0);
3333 case tcc_comparison:
3334 case tcc_binary:
3335 if (OP_SAME (0) && OP_SAME (1))
3336 return 1;
3338 /* For commutative ops, allow the other order. */
3339 return (commutative_tree_code (TREE_CODE (arg0))
3340 && operand_equal_p (TREE_OPERAND (arg0, 0),
3341 TREE_OPERAND (arg1, 1), flags)
3342 && operand_equal_p (TREE_OPERAND (arg0, 1),
3343 TREE_OPERAND (arg1, 0), flags));
3345 case tcc_reference:
3346 /* If either of the pointer (or reference) expressions we are
3347 dereferencing contain a side effect, these cannot be equal. */
3348 if (TREE_SIDE_EFFECTS (arg0)
3349 || TREE_SIDE_EFFECTS (arg1))
3350 return 0;
3352 switch (TREE_CODE (arg0))
3354 case INDIRECT_REF:
3355 case ALIGN_INDIRECT_REF:
3356 case MISALIGNED_INDIRECT_REF:
3357 case REALPART_EXPR:
3358 case IMAGPART_EXPR:
3359 return OP_SAME (0);
3361 case ARRAY_REF:
3362 case ARRAY_RANGE_REF:
3363 /* Operands 2 and 3 may be null.
3364 Compare the array index by value first if it is constant, as we
3365 may have different types but the same value here. */
3366 return (OP_SAME (0)
3367 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3368 TREE_OPERAND (arg1, 1))
3369 || OP_SAME (1))
3370 && OP_SAME_WITH_NULL (2)
3371 && OP_SAME_WITH_NULL (3));
3373 case COMPONENT_REF:
3374 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3375 may be NULL when we're called to compare MEM_EXPRs. */
3376 return OP_SAME_WITH_NULL (0)
3377 && OP_SAME (1)
3378 && OP_SAME_WITH_NULL (2);
3380 case BIT_FIELD_REF:
3381 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3383 default:
3384 return 0;
3387 case tcc_expression:
3388 switch (TREE_CODE (arg0))
3390 case ADDR_EXPR:
3391 case TRUTH_NOT_EXPR:
3392 return OP_SAME (0);
3394 case TRUTH_ANDIF_EXPR:
3395 case TRUTH_ORIF_EXPR:
3396 return OP_SAME (0) && OP_SAME (1);
3398 case TRUTH_AND_EXPR:
3399 case TRUTH_OR_EXPR:
3400 case TRUTH_XOR_EXPR:
3401 if (OP_SAME (0) && OP_SAME (1))
3402 return 1;
3404 /* Otherwise take into account this is a commutative operation. */
3405 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3406 TREE_OPERAND (arg1, 1), flags)
3407 && operand_equal_p (TREE_OPERAND (arg0, 1),
3408 TREE_OPERAND (arg1, 0), flags));
3410 case COND_EXPR:
3411 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3413 default:
3414 return 0;
3417 case tcc_vl_exp:
3418 switch (TREE_CODE (arg0))
3420 case CALL_EXPR:
3421 /* If the CALL_EXPRs call different functions, then they
3422 clearly cannot be equal. */
3423 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3424 flags))
3425 return 0;
3428 unsigned int cef = call_expr_flags (arg0);
3429 if (flags & OEP_PURE_SAME)
3430 cef &= ECF_CONST | ECF_PURE;
3431 else
3432 cef &= ECF_CONST;
3433 if (!cef)
3434 return 0;
3437 /* Now see if all the arguments are the same. */
3439 const_call_expr_arg_iterator iter0, iter1;
3440 const_tree a0, a1;
3441 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3442 a1 = first_const_call_expr_arg (arg1, &iter1);
3443 a0 && a1;
3444 a0 = next_const_call_expr_arg (&iter0),
3445 a1 = next_const_call_expr_arg (&iter1))
3446 if (! operand_equal_p (a0, a1, flags))
3447 return 0;
3449 /* If we get here and both argument lists are exhausted
3450 then the CALL_EXPRs are equal. */
3451 return ! (a0 || a1);
3453 default:
3454 return 0;
3457 case tcc_declaration:
3458 /* Consider __builtin_sqrt equal to sqrt. */
3459 return (TREE_CODE (arg0) == FUNCTION_DECL
3460 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3461 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3462 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3464 default:
3465 return 0;
3468 #undef OP_SAME
3469 #undef OP_SAME_WITH_NULL
3472 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3473 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3475 When in doubt, return 0. */
3477 static int
3478 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3480 int unsignedp1, unsignedpo;
3481 tree primarg0, primarg1, primother;
3482 unsigned int correct_width;
3484 if (operand_equal_p (arg0, arg1, 0))
3485 return 1;
3487 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3488 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3489 return 0;
3491 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3492 and see if the inner values are the same. This removes any
3493 signedness comparison, which doesn't matter here. */
3494 primarg0 = arg0, primarg1 = arg1;
3495 STRIP_NOPS (primarg0);
3496 STRIP_NOPS (primarg1);
3497 if (operand_equal_p (primarg0, primarg1, 0))
3498 return 1;
3500 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3501 actual comparison operand, ARG0.
3503 First throw away any conversions to wider types
3504 already present in the operands. */
3506 primarg1 = get_narrower (arg1, &unsignedp1);
3507 primother = get_narrower (other, &unsignedpo);
3509 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3510 if (unsignedp1 == unsignedpo
3511 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3512 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3514 tree type = TREE_TYPE (arg0);
3516 /* Make sure shorter operand is extended the right way
3517 to match the longer operand. */
3518 primarg1 = fold_convert (signed_or_unsigned_type_for
3519 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3521 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3522 return 1;
3525 return 0;
3528 /* See if ARG is an expression that is either a comparison or is performing
3529 arithmetic on comparisons. The comparisons must only be comparing
3530 two different values, which will be stored in *CVAL1 and *CVAL2; if
3531 they are nonzero it means that some operands have already been found.
3532 No variables may be used anywhere else in the expression except in the
3533 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3534 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3536 If this is true, return 1. Otherwise, return zero. */
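/* For illustration (not part of the original source): for
   ARG = (x < y) || (x == y), both comparisons mention only X and Y, so
   this returns 1 with *CVAL1 = x and *CVAL2 = y.  For
   (x < y) || (y < z) it returns 0, since three distinct values
   appear.  */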
3538 static int
3539 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3541 enum tree_code code = TREE_CODE (arg);
3542 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3544 /* We can handle some of the tcc_expression cases here. */
3545 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3546 tclass = tcc_unary;
3547 else if (tclass == tcc_expression
3548 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3549 || code == COMPOUND_EXPR))
3550 tclass = tcc_binary;
3552 else if (tclass == tcc_expression && code == SAVE_EXPR
3553 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3555 /* If we've already found a CVAL1 or CVAL2, this expression is
3556 too complex to handle. */
3557 if (*cval1 || *cval2)
3558 return 0;
3560 tclass = tcc_unary;
3561 *save_p = 1;
3564 switch (tclass)
3566 case tcc_unary:
3567 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3569 case tcc_binary:
3570 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3571 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3572 cval1, cval2, save_p));
3574 case tcc_constant:
3575 return 1;
3577 case tcc_expression:
3578 if (code == COND_EXPR)
3579 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3580 cval1, cval2, save_p)
3581 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3582 cval1, cval2, save_p)
3583 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3584 cval1, cval2, save_p));
3585 return 0;
3587 case tcc_comparison:
3588 /* First see if we can handle the first operand, then the second. For
3589 the second operand, we know *CVAL1 can't be zero. It must be that
3590 one side of the comparison is each of the values; test for the
3591 case where this isn't true by failing if the two operands
3592 are the same. */
3594 if (operand_equal_p (TREE_OPERAND (arg, 0),
3595 TREE_OPERAND (arg, 1), 0))
3596 return 0;
3598 if (*cval1 == 0)
3599 *cval1 = TREE_OPERAND (arg, 0);
3600 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3602 else if (*cval2 == 0)
3603 *cval2 = TREE_OPERAND (arg, 0);
3604 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3606 else
3607 return 0;
3609 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3611 else if (*cval2 == 0)
3612 *cval2 = TREE_OPERAND (arg, 1);
3613 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3615 else
3616 return 0;
3618 return 1;
3620 default:
3621 return 0;
3625 /* ARG is a tree that is known to contain just arithmetic operations and
3626 comparisons. Evaluate the operations in the tree substituting NEW0 for
3627 any occurrence of OLD0 as an operand of a comparison and likewise for
3628 NEW1 and OLD1. */
3630 static tree
3631 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3632 tree old1, tree new1)
3634 tree type = TREE_TYPE (arg);
3635 enum tree_code code = TREE_CODE (arg);
3636 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3638 /* We can handle some of the tcc_expression cases here. */
3639 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3640 tclass = tcc_unary;
3641 else if (tclass == tcc_expression
3642 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3643 tclass = tcc_binary;
3645 switch (tclass)
3647 case tcc_unary:
3648 return fold_build1_loc (loc, code, type,
3649 eval_subst (loc, TREE_OPERAND (arg, 0),
3650 old0, new0, old1, new1));
3652 case tcc_binary:
3653 return fold_build2_loc (loc, code, type,
3654 eval_subst (loc, TREE_OPERAND (arg, 0),
3655 old0, new0, old1, new1),
3656 eval_subst (loc, TREE_OPERAND (arg, 1),
3657 old0, new0, old1, new1));
3659 case tcc_expression:
3660 switch (code)
3662 case SAVE_EXPR:
3663 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3664 old1, new1);
3666 case COMPOUND_EXPR:
3667 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3668 old1, new1);
3670 case COND_EXPR:
3671 return fold_build3_loc (loc, code, type,
3672 eval_subst (loc, TREE_OPERAND (arg, 0),
3673 old0, new0, old1, new1),
3674 eval_subst (loc, TREE_OPERAND (arg, 1),
3675 old0, new0, old1, new1),
3676 eval_subst (loc, TREE_OPERAND (arg, 2),
3677 old0, new0, old1, new1));
3678 default:
3679 break;
3681 /* Fall through - ??? */
3683 case tcc_comparison:
3685 tree arg0 = TREE_OPERAND (arg, 0);
3686 tree arg1 = TREE_OPERAND (arg, 1);
3688 /* We need to check both for exact equality and tree equality. The
3689 former will be true if the operand has a side-effect. In that
3690 case, we know the operand occurred exactly once. */
3692 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3693 arg0 = new0;
3694 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3695 arg0 = new1;
3697 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3698 arg1 = new0;
3699 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3700 arg1 = new1;
3702 return fold_build2_loc (loc, code, type, arg0, arg1);
3705 default:
3706 return arg;
3710 /* Return a tree for the case when the result of an expression is RESULT
3711 converted to TYPE and OMITTED was previously an operand of the expression
3712 but is now not needed (e.g., we folded OMITTED * 0).
3714 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3715 the conversion of RESULT to TYPE. */
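/* For illustration (not part of the original source): when folding
   f () * 0, the result is 0 but F () must still be evaluated, so the
   code below builds the COMPOUND_EXPR (f (), 0); if OMITTED had no side
   effects, it would simply return the constant wrapped as a
   non-lvalue.  */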
3717 tree
3718 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3720 tree t = fold_convert_loc (loc, type, result);
3722 /* If the resulting operand is an empty statement, just return the omitted
3723 statement cast to void. */
3724 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3726 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3727 goto omit_one_operand_exit;
3730 if (TREE_SIDE_EFFECTS (omitted))
3732 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3733 goto omit_one_operand_exit;
3736 return non_lvalue_loc (loc, t);
3738 omit_one_operand_exit:
3739 protected_set_expr_location (t, loc);
3740 return t;
3743 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3745 static tree
3746 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3747 tree omitted)
3749 tree t = fold_convert_loc (loc, type, result);
3751 /* If the resulting operand is an empty statement, just return the omitted
3752 statement cast to void. */
3753 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3755 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3756 goto pedantic_omit_one_operand_exit;
3759 if (TREE_SIDE_EFFECTS (omitted))
3761 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3762 goto pedantic_omit_one_operand_exit;
3765 return pedantic_non_lvalue_loc (loc, t);
3767 pedantic_omit_one_operand_exit:
3768 protected_set_expr_location (t, loc);
3769 return t;
3772 /* Return a tree for the case when the result of an expression is RESULT
3773 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3774 of the expression but are now not needed.
3776 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3777 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3778 evaluated before OMITTED2. Otherwise, if neither has side effects,
3779 just do the conversion of RESULT to TYPE. */
3781 tree
3782 omit_two_operands_loc (location_t loc, tree type, tree result,
3783 tree omitted1, tree omitted2)
3785 tree t = fold_convert_loc (loc, type, result);
3787 if (TREE_SIDE_EFFECTS (omitted2))
3789 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3790 SET_EXPR_LOCATION (t, loc);
3792 if (TREE_SIDE_EFFECTS (omitted1))
3794 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3795 SET_EXPR_LOCATION (t, loc);
3798 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3802 /* Return a simplified tree node for the truth-negation of ARG. This
3803 never alters ARG itself. We assume that ARG is an operation that
3804 returns a truth value (0 or 1).
3806 FIXME: one would think we would fold the result, but it causes
3807 problems with the dominator optimizer. */
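/* For illustration (not part of the original source): the cases below
   apply De Morgan's laws, e.g. !(a && b) becomes !a || !b, and invert
   comparisons directly, e.g. !(a < b) on integers becomes a >= b.  */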
3809 tree
3810 fold_truth_not_expr (location_t loc, tree arg)
3812 tree t, type = TREE_TYPE (arg);
3813 enum tree_code code = TREE_CODE (arg);
3814 location_t loc1, loc2;
3816 /* If this is a comparison, we can simply invert it, except for
3817 floating-point non-equality comparisons, in which case we just
3818 enclose a TRUTH_NOT_EXPR around what we have. */
3820 if (TREE_CODE_CLASS (code) == tcc_comparison)
3822 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3823 if (FLOAT_TYPE_P (op_type)
3824 && flag_trapping_math
3825 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3826 && code != NE_EXPR && code != EQ_EXPR)
3827 return NULL_TREE;
3829 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3830 if (code == ERROR_MARK)
3831 return NULL_TREE;
3833 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3834 SET_EXPR_LOCATION (t, loc);
3835 return t;
3838 switch (code)
3840 case INTEGER_CST:
3841 return constant_boolean_node (integer_zerop (arg), type);
3843 case TRUTH_AND_EXPR:
3844 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3845 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3846 if (loc1 == UNKNOWN_LOCATION)
3847 loc1 = loc;
3848 if (loc2 == UNKNOWN_LOCATION)
3849 loc2 = loc;
3850 t = build2 (TRUTH_OR_EXPR, type,
3851 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3852 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3853 break;
3855 case TRUTH_OR_EXPR:
3856 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3857 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3858 if (loc1 == UNKNOWN_LOCATION)
3859 loc1 = loc;
3860 if (loc2 == UNKNOWN_LOCATION)
3861 loc2 = loc;
3862 t = build2 (TRUTH_AND_EXPR, type,
3863 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3864 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3865 break;
3867 case TRUTH_XOR_EXPR:
3868 /* Here we can invert either operand. We invert the first operand
3869 unless the second operand is a TRUTH_NOT_EXPR in which case our
3870 result is the XOR of the first operand with the inside of the
3871 negation of the second operand. */
3873 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3874 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3875 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3876 else
3877 t = build2 (TRUTH_XOR_EXPR, type,
3878 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3879 TREE_OPERAND (arg, 1));
3880 break;
3882 case TRUTH_ANDIF_EXPR:
3883 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3884 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3885 if (loc1 == UNKNOWN_LOCATION)
3886 loc1 = loc;
3887 if (loc2 == UNKNOWN_LOCATION)
3888 loc2 = loc;
3889 t = build2 (TRUTH_ORIF_EXPR, type,
3890 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3891 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3892 break;
3894 case TRUTH_ORIF_EXPR:
3895 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3896 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3897 if (loc1 == UNKNOWN_LOCATION)
3898 loc1 = loc;
3899 if (loc2 == UNKNOWN_LOCATION)
3900 loc2 = loc;
3901 t = build2 (TRUTH_ANDIF_EXPR, type,
3902 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3903 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3904 break;
3906 case TRUTH_NOT_EXPR:
3907 return TREE_OPERAND (arg, 0);
3909 case COND_EXPR:
3910 {
3911 tree arg1 = TREE_OPERAND (arg, 1);
3912 tree arg2 = TREE_OPERAND (arg, 2);
3914 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3915 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3916 if (loc1 == UNKNOWN_LOCATION)
3917 loc1 = loc;
3918 if (loc2 == UNKNOWN_LOCATION)
3919 loc2 = loc;
3921 /* A COND_EXPR may have a throw as one operand, which
3922 then has void type. Just leave void operands
3923 as they are. */
3924 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3925 VOID_TYPE_P (TREE_TYPE (arg1))
3926 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3927 VOID_TYPE_P (TREE_TYPE (arg2))
3928 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3929 break;
3930 }
3932 case COMPOUND_EXPR:
3933 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3934 if (loc1 == UNKNOWN_LOCATION)
3935 loc1 = loc;
3936 t = build2 (COMPOUND_EXPR, type,
3937 TREE_OPERAND (arg, 0),
3938 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3939 break;
3941 case NON_LVALUE_EXPR:
3942 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3943 if (loc1 == UNKNOWN_LOCATION)
3944 loc1 = loc;
3945 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3947 CASE_CONVERT:
3948 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3949 {
3950 t = build1 (TRUTH_NOT_EXPR, type, arg);
3951 break;
3952 }
3954 /* ... fall through ... */
3956 case FLOAT_EXPR:
3957 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3958 if (loc1 == UNKNOWN_LOCATION)
3959 loc1 = loc;
3960 t = build1 (TREE_CODE (arg), type,
3961 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3962 break;
3964 case BIT_AND_EXPR:
3965 if (!integer_onep (TREE_OPERAND (arg, 1)))
3966 return NULL_TREE;
3967 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3968 break;
3970 case SAVE_EXPR:
3971 t = build1 (TRUTH_NOT_EXPR, type, arg);
3972 break;
3974 case CLEANUP_POINT_EXPR:
3975 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3976 if (loc1 == UNKNOWN_LOCATION)
3977 loc1 = loc;
3978 t = build1 (CLEANUP_POINT_EXPR, type,
3979 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3980 break;
3982 default:
3983 t = NULL_TREE;
3984 break;
3985 }
3987 if (t)
3988 SET_EXPR_LOCATION (t, loc);
3990 return t;
3991 }
3993 /* Return a simplified tree node for the truth-negation of ARG. This
3994 never alters ARG itself. We assume that ARG is an operation that
3995 returns a truth value (0 or 1).
3997 FIXME: one would think we would fold the result, but it causes
3998 problems with the dominator optimizer. */
4000 tree
4001 invert_truthvalue_loc (location_t loc, tree arg)
4002 {
4003 tree tem;
4005 if (TREE_CODE (arg) == ERROR_MARK)
4006 return arg;
4008 tem = fold_truth_not_expr (loc, arg);
4009 if (!tem)
4010 {
4011 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
4012 SET_EXPR_LOCATION (tem, loc);
4013 }
4015 return tem;
4016 }
4018 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
4019 operands are another bit-wise operation with a common input. If so,
4020 distribute the bit operations to save an operation and possibly two if
4021 constants are involved. For example, convert
4022 (A | B) & (A | C) into A | (B & C)
4023 Further simplification will occur if B and C are constants.
4025 If this optimization cannot be done, 0 will be returned. */
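/* Concretely: (x | 3) & (x | 5) becomes x | (3 & 5), which folds
   further to x | 1, trading two IORs and an AND for a single IOR. */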
4027 static tree
4028 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
4029 tree arg0, tree arg1)
4030 {
4031 tree common;
4032 tree left, right;
4034 if (TREE_CODE (arg0) != TREE_CODE (arg1)
4035 || TREE_CODE (arg0) == code
4036 || (TREE_CODE (arg0) != BIT_AND_EXPR
4037 && TREE_CODE (arg0) != BIT_IOR_EXPR))
4038 return 0;
4040 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
4041 {
4042 common = TREE_OPERAND (arg0, 0);
4043 left = TREE_OPERAND (arg0, 1);
4044 right = TREE_OPERAND (arg1, 1);
4045 }
4046 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
4047 {
4048 common = TREE_OPERAND (arg0, 0);
4049 left = TREE_OPERAND (arg0, 1);
4050 right = TREE_OPERAND (arg1, 0);
4051 }
4052 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
4053 {
4054 common = TREE_OPERAND (arg0, 1);
4055 left = TREE_OPERAND (arg0, 0);
4056 right = TREE_OPERAND (arg1, 1);
4057 }
4058 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
4059 {
4060 common = TREE_OPERAND (arg0, 1);
4061 left = TREE_OPERAND (arg0, 0);
4062 right = TREE_OPERAND (arg1, 0);
4063 }
4064 else
4065 return 0;
4067 common = fold_convert_loc (loc, type, common);
4068 left = fold_convert_loc (loc, type, left);
4069 right = fold_convert_loc (loc, type, right);
4070 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
4071 fold_build2_loc (loc, code, type, left, right));
4072 }
4074 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
4075 with code CODE. This optimization is unsafe. */
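/* For example, a/c + b/c becomes (a + b)/c, and a/2.0 - a/4.0 becomes
   a * (1.0/2.0 - 1.0/4.0), i.e. a * 0.25. The rewritten forms can
   round differently from the originals, hence the "unsafe" label
   above. */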
4076 static tree
4077 distribute_real_division (location_t loc, enum tree_code code, tree type,
4078 tree arg0, tree arg1)
4079 {
4080 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
4081 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
4083 /* (A / C) +- (B / C) -> (A +- B) / C. */
4084 if (mul0 == mul1
4085 && operand_equal_p (TREE_OPERAND (arg0, 1),
4086 TREE_OPERAND (arg1, 1), 0))
4087 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
4088 fold_build2_loc (loc, code, type,
4089 TREE_OPERAND (arg0, 0),
4090 TREE_OPERAND (arg1, 0)),
4091 TREE_OPERAND (arg0, 1));
4093 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
4094 if (operand_equal_p (TREE_OPERAND (arg0, 0),
4095 TREE_OPERAND (arg1, 0), 0)
4096 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
4097 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
4098 {
4099 REAL_VALUE_TYPE r0, r1;
4100 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
4101 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
4102 if (!mul0)
4103 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
4104 if (!mul1)
4105 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
4106 real_arithmetic (&r0, code, &r0, &r1);
4107 return fold_build2_loc (loc, MULT_EXPR, type,
4108 TREE_OPERAND (arg0, 0),
4109 build_real (type, r0));
4110 }
4112 return NULL_TREE;
4113 }
4115 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4116 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
4118 static tree
4119 make_bit_field_ref (location_t loc, tree inner, tree type,
4120 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
4121 {
4122 tree result, bftype;
4124 if (bitpos == 0)
4125 {
4126 tree size = TYPE_SIZE (TREE_TYPE (inner));
4127 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4128 || POINTER_TYPE_P (TREE_TYPE (inner)))
4129 && host_integerp (size, 0)
4130 && tree_low_cst (size, 0) == bitsize)
4131 return fold_convert_loc (loc, type, inner);
4132 }
4134 bftype = type;
4135 if (TYPE_PRECISION (bftype) != bitsize
4136 || TYPE_UNSIGNED (bftype) == !unsignedp)
4137 bftype = build_nonstandard_integer_type (bitsize, 0);
4139 result = build3 (BIT_FIELD_REF, bftype, inner,
4140 size_int (bitsize), bitsize_int (bitpos));
4141 SET_EXPR_LOCATION (result, loc);
4143 if (bftype != type)
4144 result = fold_convert_loc (loc, type, result);
4146 return result;
4147 }
4149 /* Optimize a bit-field compare.
4151 There are two cases: First is a compare against a constant and the
4152 second is a comparison of two items where the fields are at the same
4153 bit position relative to the start of a chunk (byte, halfword, word)
4154 large enough to contain it. In these cases we can avoid the shift
4155 implicit in bitfield extractions.
4157 For constants, we emit a compare of the shifted constant with the
4158 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4159 compared. For two fields at the same position, we do the ANDs with the
4160 similar mask and compare the result of the ANDs.
4162 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4163 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4164 are the left and right operands of the comparison, respectively.
4166 If the optimization described above can be done, we return the resulting
4167 tree. Otherwise we return zero. */
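/* Sketch of the constant case (the layout is illustrative): for
   "struct S { unsigned f : 3; } s", the test "s.f == 5" can be done
   as "(w & m) == ((5 << k) & m)", where w is the chunk containing the
   bit-field, k its bit position and m the shifted field mask, so s.f
   never has to be extracted and shifted at run time. */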
4169 static tree
4170 optimize_bit_field_compare (location_t loc, enum tree_code code,
4171 tree compare_type, tree lhs, tree rhs)
4173 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4174 tree type = TREE_TYPE (lhs);
4175 tree signed_type, unsigned_type;
4176 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4177 enum machine_mode lmode, rmode, nmode;
4178 int lunsignedp, runsignedp;
4179 int lvolatilep = 0, rvolatilep = 0;
4180 tree linner, rinner = NULL_TREE;
4181 tree mask;
4182 tree offset;
4184 /* Get all the information about the extractions being done. If the bit size
4185 is the same as the size of the underlying object, we aren't doing an
4186 extraction at all and so can do nothing. We also don't want to
4187 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4188 then will no longer be able to replace it. */
4189 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4190 &lunsignedp, &lvolatilep, false);
4191 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4192 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
4193 return 0;
4195 if (!const_p)
4196 {
4197 /* If this is not a constant, we can only do something if bit positions,
4198 sizes, and signedness are the same. */
4199 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4200 &runsignedp, &rvolatilep, false);
4202 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4203 || lunsignedp != runsignedp || offset != 0
4204 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4205 return 0;
4206 }
4208 /* See if we can find a mode to refer to this field. We should be able to,
4209 but fail if we can't. */
4210 nmode = get_best_mode (lbitsize, lbitpos,
4211 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4212 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4213 TYPE_ALIGN (TREE_TYPE (rinner))),
4214 word_mode, lvolatilep || rvolatilep);
4215 if (nmode == VOIDmode)
4216 return 0;
4218 /* Set signed and unsigned types of the precision of this mode for the
4219 shifts below. */
4220 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4221 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4223 /* Compute the bit position and size for the new reference and our offset
4224 within it. If the new reference is the same size as the original, we
4225 won't optimize anything, so return zero. */
4226 nbitsize = GET_MODE_BITSIZE (nmode);
4227 nbitpos = lbitpos & ~ (nbitsize - 1);
4228 lbitpos -= nbitpos;
4229 if (nbitsize == lbitsize)
4230 return 0;
4232 if (BYTES_BIG_ENDIAN)
4233 lbitpos = nbitsize - lbitsize - lbitpos;
4235 /* Make the mask to be used against the extracted field. */
4236 mask = build_int_cst_type (unsigned_type, -1);
4237 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4238 mask = const_binop (RSHIFT_EXPR, mask,
4239 size_int (nbitsize - lbitsize - lbitpos), 0);
4241 if (! const_p)
4242 /* If not comparing with constant, just rework the comparison
4243 and return. */
4244 return fold_build2_loc (loc, code, compare_type,
4245 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4246 make_bit_field_ref (loc, linner,
4247 unsigned_type,
4248 nbitsize, nbitpos,
4249 1),
4250 mask),
4251 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4252 make_bit_field_ref (loc, rinner,
4253 unsigned_type,
4254 nbitsize, nbitpos,
4255 1),
4256 mask));
4258 /* Otherwise, we are handling the constant case. See if the constant is too
4259 big for the field. Warn and return a tree for 0 (false) if so. We do
4260 this not only for its own sake, but to avoid having to test for this
4261 error case below. If we didn't, we might generate wrong code.
4263 For unsigned fields, the constant shifted right by the field length should
4264 be all zero. For signed fields, the high-order bits should agree with
4265 the sign bit. */
4267 if (lunsignedp)
4268 {
4269 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4270 fold_convert_loc (loc,
4271 unsigned_type, rhs),
4272 size_int (lbitsize), 0)))
4273 {
4274 warning (0, "comparison is always %d due to width of bit-field",
4275 code == NE_EXPR);
4276 return constant_boolean_node (code == NE_EXPR, compare_type);
4277 }
4278 }
4279 else
4280 {
4281 tree tem = const_binop (RSHIFT_EXPR,
4282 fold_convert_loc (loc, signed_type, rhs),
4283 size_int (lbitsize - 1), 0);
4284 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4285 {
4286 warning (0, "comparison is always %d due to width of bit-field",
4287 code == NE_EXPR);
4288 return constant_boolean_node (code == NE_EXPR, compare_type);
4289 }
4290 }
4292 /* Single-bit compares should always be against zero. */
4293 if (lbitsize == 1 && ! integer_zerop (rhs))
4295 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4296 rhs = build_int_cst (type, 0);
4299 /* Make a new bitfield reference, shift the constant over the
4300 appropriate number of bits and mask it with the computed mask
4301 (in case this was a signed field). If we changed it, make a new one. */
4302 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
4303 if (lvolatilep)
4304 {
4305 TREE_SIDE_EFFECTS (lhs) = 1;
4306 TREE_THIS_VOLATILE (lhs) = 1;
4307 }
4309 rhs = const_binop (BIT_AND_EXPR,
4310 const_binop (LSHIFT_EXPR,
4311 fold_convert_loc (loc, unsigned_type, rhs),
4312 size_int (lbitpos), 0),
4313 mask, 0);
4315 lhs = build2 (code, compare_type,
4316 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4317 rhs);
4318 SET_EXPR_LOCATION (lhs, loc);
4319 return lhs;
4320 }
4322 /* Subroutine for fold_truthop: decode a field reference.
4324 If EXP is a comparison reference, we return the innermost reference.
4326 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4327 set to the starting bit number.
4329 If the innermost field can be completely contained in a mode-sized
4330 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4332 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4333 otherwise it is not changed.
4335 *PUNSIGNEDP is set to the signedness of the field.
4337 *PMASK is set to the mask used. This is either contained in a
4338 BIT_AND_EXPR or derived from the width of the field.
4340 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4342 Return 0 if this is not a component reference or is one that we can't
4343 do anything with. */
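/* For instance (the constant is illustrative), for EXP of the form
   "b & 0x70" where b is a bit-field, this returns the containing
   object, sets *PBITSIZE and *PBITPOS from the field, sets *PAND_MASK
   to 0x70, and sets *PMASK to 0x70 merged with the mask for the
   field's own width. */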
4345 static tree
4346 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4347 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4348 int *punsignedp, int *pvolatilep,
4349 tree *pmask, tree *pand_mask)
4351 tree outer_type = 0;
4352 tree and_mask = 0;
4353 tree mask, inner, offset;
4354 tree unsigned_type;
4355 unsigned int precision;
4357 /* All the optimizations using this function assume integer fields.
4358 There are problems with FP fields since the type_for_size call
4359 below can fail for, e.g., XFmode. */
4360 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4361 return 0;
4363 /* We are interested in the bare arrangement of bits, so strip everything
4364 that doesn't affect the machine mode. However, record the type of the
4365 outermost expression if it may matter below. */
4366 if (CONVERT_EXPR_P (exp)
4367 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4368 outer_type = TREE_TYPE (exp);
4369 STRIP_NOPS (exp);
4371 if (TREE_CODE (exp) == BIT_AND_EXPR)
4372 {
4373 and_mask = TREE_OPERAND (exp, 1);
4374 exp = TREE_OPERAND (exp, 0);
4375 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4376 if (TREE_CODE (and_mask) != INTEGER_CST)
4377 return 0;
4378 }
4380 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4381 punsignedp, pvolatilep, false);
4382 if ((inner == exp && and_mask == 0)
4383 || *pbitsize < 0 || offset != 0
4384 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4385 return 0;
4387 /* If the number of bits in the reference is the same as the bitsize of
4388 the outer type, then the outer type gives the signedness. Otherwise
4389 (in case of a small bitfield) the signedness is unchanged. */
4390 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4391 *punsignedp = TYPE_UNSIGNED (outer_type);
4393 /* Compute the mask to access the bitfield. */
4394 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4395 precision = TYPE_PRECISION (unsigned_type);
4397 mask = build_int_cst_type (unsigned_type, -1);
4399 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4400 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4402 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4403 if (and_mask != 0)
4404 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4405 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4407 *pmask = mask;
4408 *pand_mask = and_mask;
4409 return inner;
4412 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4413 bit positions. */
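/* E.g. for a 32-bit MASK, all_ones_mask_p (mask, 3) holds exactly when
   MASK is 7 (binary 111): -1 shifted left and then back right by
   precision - 3 leaves just the three low-order bits set. */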
4415 static int
4416 all_ones_mask_p (const_tree mask, int size)
4417 {
4418 tree type = TREE_TYPE (mask);
4419 unsigned int precision = TYPE_PRECISION (type);
4420 tree tmask;
4422 tmask = build_int_cst_type (signed_type_for (type), -1);
4424 return
4425 tree_int_cst_equal (mask,
4426 const_binop (RSHIFT_EXPR,
4427 const_binop (LSHIFT_EXPR, tmask,
4428 size_int (precision - size),
4429 0),
4430 size_int (precision - size), 0));
4431 }
4433 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4434 represents the sign bit of EXP's type. If EXP represents a sign
4435 or zero extension, also test VAL against the unextended type.
4436 The return value is the (sub)expression whose sign bit is VAL,
4437 or NULL_TREE otherwise. */
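/* For a 32-bit int EXP, VAL must be 0x80000000. If EXP is "(int) c"
   with c of a signed 8-bit type, VAL == 0x80, the sign bit of the
   unextended type, is also accepted, and c itself is returned. */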
4439 static tree
4440 sign_bit_p (tree exp, const_tree val)
4442 unsigned HOST_WIDE_INT mask_lo, lo;
4443 HOST_WIDE_INT mask_hi, hi;
4444 int width;
4445 tree t;
4447 /* Tree EXP must have an integral type. */
4448 t = TREE_TYPE (exp);
4449 if (! INTEGRAL_TYPE_P (t))
4450 return NULL_TREE;
4452 /* Tree VAL must be an integer constant. */
4453 if (TREE_CODE (val) != INTEGER_CST
4454 || TREE_OVERFLOW (val))
4455 return NULL_TREE;
4457 width = TYPE_PRECISION (t);
4458 if (width > HOST_BITS_PER_WIDE_INT)
4459 {
4460 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4461 lo = 0;
4463 mask_hi = ((unsigned HOST_WIDE_INT) -1
4464 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4465 mask_lo = -1;
4466 }
4467 else
4468 {
4469 hi = 0;
4470 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4472 mask_hi = 0;
4473 mask_lo = ((unsigned HOST_WIDE_INT) -1
4474 >> (HOST_BITS_PER_WIDE_INT - width));
4475 }
4477 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4478 treat VAL as if it were unsigned. */
4479 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4480 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4481 return exp;
4483 /* Handle extension from a narrower type. */
4484 if (TREE_CODE (exp) == NOP_EXPR
4485 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4486 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4488 return NULL_TREE;
4491 /* Subroutine for fold_truthop: determine if an operand is simple enough
4492 to be evaluated unconditionally. */
4494 static int
4495 simple_operand_p (const_tree exp)
4497 /* Strip any conversions that don't change the machine mode. */
4498 STRIP_NOPS (exp);
4500 return (CONSTANT_CLASS_P (exp)
4501 || TREE_CODE (exp) == SSA_NAME
4502 || (DECL_P (exp)
4503 && ! TREE_ADDRESSABLE (exp)
4504 && ! TREE_THIS_VOLATILE (exp)
4505 && ! DECL_NONLOCAL (exp)
4506 /* Don't regard global variables as simple. They may be
4507 allocated in ways unknown to the compiler (shared memory,
4508 #pragma weak, etc). */
4509 && ! TREE_PUBLIC (exp)
4510 && ! DECL_EXTERNAL (exp)
4511 /* Loading a static variable is unduly expensive, but global
4512 registers aren't expensive. */
4513 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4516 /* The following functions are subroutines to fold_range_test and allow it to
4517 try to change a logical combination of comparisons into a range test.
4519 For example, both
4520 X == 2 || X == 3 || X == 4 || X == 5
4521 and
4522 X >= 2 && X <= 5
4523 are converted to
4524 (unsigned) (X - 2) <= 3
4526 We describe each set of comparisons as being either inside or outside
4527 a range, using a variable named like IN_P, and then describe the
4528 range with a lower and upper bound. If one of the bounds is omitted,
4529 it represents either the highest or lowest value of the type.
4531 In the comments below, we represent a range by two numbers in brackets
4532 preceded by a "+" to designate being inside that range, or a "-" to
4533 designate being outside that range, so the condition can be inverted by
4534 flipping the prefix. An omitted bound is represented by a "-". For
4535 example, "- [-, 10]" means being outside the range starting at the lowest
4536 possible value and ending at 10, in other words, being greater than 10.
4537 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4538 always false.
4540 We set up things so that the missing bounds are handled in a consistent
4541 manner so neither a missing bound nor "true" and "false" need to be
4542 handled using a special case. */
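/* A worked instance of this encoding: X == 2 || X == 3 || X == 4 || X == 5
   is the union of + [2, 2] through + [5, 5], which merges to + [2, 5]
   and is finally emitted as (unsigned) (X - 2) <= 3; its inversion
   - [2, 5] becomes (unsigned) (X - 2) > 3. */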
4544 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4545 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4546 and UPPER1_P are nonzero if the respective argument is an upper bound
4547 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4548 must be specified for a comparison. ARG1 will be converted to ARG0's
4549 type if both are specified. */
4551 static tree
4552 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4553 tree arg1, int upper1_p)
4555 tree tem;
4556 int result;
4557 int sgn0, sgn1;
4559 /* If neither arg represents infinity, do the normal operation.
4560 Else, if not a comparison, return infinity. Else handle the special
4561 comparison rules. Note that most of the cases below won't occur, but
4562 are handled for consistency. */
4564 if (arg0 != 0 && arg1 != 0)
4565 {
4566 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4567 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4568 STRIP_NOPS (tem);
4569 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4570 }
4572 if (TREE_CODE_CLASS (code) != tcc_comparison)
4573 return 0;
4575 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4576 for neither. In real maths, we cannot assume open ended ranges are
4577 the same. But, this is computer arithmetic, where numbers are finite.
4578 We can therefore make the transformation of any unbounded range with
4579 the value Z, Z being greater than any representable number. This permits
4580 us to treat unbounded ranges as equal. */
4581 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4582 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4583 switch (code)
4584 {
4585 case EQ_EXPR:
4586 result = sgn0 == sgn1;
4587 break;
4588 case NE_EXPR:
4589 result = sgn0 != sgn1;
4590 break;
4591 case LT_EXPR:
4592 result = sgn0 < sgn1;
4593 break;
4594 case LE_EXPR:
4595 result = sgn0 <= sgn1;
4596 break;
4597 case GT_EXPR:
4598 result = sgn0 > sgn1;
4599 break;
4600 case GE_EXPR:
4601 result = sgn0 >= sgn1;
4602 break;
4603 default:
4604 gcc_unreachable ();
4605 }
4607 return constant_boolean_node (result, type);
4608 }
4610 /* Given EXP, a logical expression, set the range it is testing into
4611 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4612 actually being tested. *PLOW and *PHIGH will be made of the same
4613 type as the returned expression. If EXP is not a comparison, we
4614 will most likely not be returning a useful value and range. Set
4615 *STRICT_OVERFLOW_P to true if the return value is only valid
4616 because signed overflow is undefined; otherwise, do not change
4617 *STRICT_OVERFLOW_P. */
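/* For example, for EXP = (x > 10) with integer x, this returns x and
   sets *PIN_P = 0 with range [-, 10]: x lies outside the range from
   the type's minimum up to 10, in other words x > 10. */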
4619 tree
4620 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4621 bool *strict_overflow_p)
4623 enum tree_code code;
4624 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4625 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4626 int in_p, n_in_p;
4627 tree low, high, n_low, n_high;
4628 location_t loc = EXPR_LOCATION (exp);
4630 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4631 and see if we can refine the range. Some of the cases below may not
4632 happen, but it doesn't seem worth worrying about this. We "continue"
4633 the outer loop when we've changed something; otherwise we "break"
4634 the switch, which will "break" the while. */
4636 in_p = 0;
4637 low = high = build_int_cst (TREE_TYPE (exp), 0);
4639 while (1)
4640 {
4641 code = TREE_CODE (exp);
4642 exp_type = TREE_TYPE (exp);
4644 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4645 {
4646 if (TREE_OPERAND_LENGTH (exp) > 0)
4647 arg0 = TREE_OPERAND (exp, 0);
4648 if (TREE_CODE_CLASS (code) == tcc_comparison
4649 || TREE_CODE_CLASS (code) == tcc_unary
4650 || TREE_CODE_CLASS (code) == tcc_binary)
4651 arg0_type = TREE_TYPE (arg0);
4652 if (TREE_CODE_CLASS (code) == tcc_binary
4653 || TREE_CODE_CLASS (code) == tcc_comparison
4654 || (TREE_CODE_CLASS (code) == tcc_expression
4655 && TREE_OPERAND_LENGTH (exp) > 1))
4656 arg1 = TREE_OPERAND (exp, 1);
4657 }
4659 switch (code)
4660 {
4661 case TRUTH_NOT_EXPR:
4662 in_p = ! in_p, exp = arg0;
4663 continue;
4665 case EQ_EXPR: case NE_EXPR:
4666 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4667 /* We can only do something if the range is testing for zero
4668 and if the second operand is an integer constant. Note that
4669 saying something is "in" the range we make is done by
4670 complementing IN_P, since it will be set in the initial case of
4671 being not equal to zero; "out" is leaving it alone. */
4672 if (low == 0 || high == 0
4673 || ! integer_zerop (low) || ! integer_zerop (high)
4674 || TREE_CODE (arg1) != INTEGER_CST)
4675 break;
4677 switch (code)
4678 {
4679 case NE_EXPR: /* - [c, c] */
4680 low = high = arg1;
4681 break;
4682 case EQ_EXPR: /* + [c, c] */
4683 in_p = ! in_p, low = high = arg1;
4684 break;
4685 case GT_EXPR: /* - [-, c] */
4686 low = 0, high = arg1;
4687 break;
4688 case GE_EXPR: /* + [c, -] */
4689 in_p = ! in_p, low = arg1, high = 0;
4690 break;
4691 case LT_EXPR: /* - [c, -] */
4692 low = arg1, high = 0;
4693 break;
4694 case LE_EXPR: /* + [-, c] */
4695 in_p = ! in_p, low = 0, high = arg1;
4696 break;
4697 default:
4698 gcc_unreachable ();
4699 }
4701 /* If this is an unsigned comparison, we also know that EXP is
4702 greater than or equal to zero. We base the range tests we make
4703 on that fact, so we record it here so we can parse existing
4704 range tests. We test arg0_type since often the return type
4705 of, e.g. EQ_EXPR, is boolean. */
4706 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4707 {
4708 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4709 in_p, low, high, 1,
4710 build_int_cst (arg0_type, 0),
4711 NULL_TREE))
4712 break;
4714 in_p = n_in_p, low = n_low, high = n_high;
4716 /* If the high bound is missing, but we have a nonzero low
4717 bound, reverse the range so it goes from zero to the low bound
4718 minus 1. */
4719 if (high == 0 && low && ! integer_zerop (low))
4720 {
4721 in_p = ! in_p;
4722 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4723 integer_one_node, 0);
4724 low = build_int_cst (arg0_type, 0);
4725 }
4726 }
4728 exp = arg0;
4729 continue;
4731 case NEGATE_EXPR:
4732 /* (-x) IN [a,b] -> x in [-b, -a] */
4733 n_low = range_binop (MINUS_EXPR, exp_type,
4734 build_int_cst (exp_type, 0),
4735 0, high, 1);
4736 n_high = range_binop (MINUS_EXPR, exp_type,
4737 build_int_cst (exp_type, 0),
4738 0, low, 0);
4739 low = n_low, high = n_high;
4740 exp = arg0;
4741 continue;
4743 case BIT_NOT_EXPR:
4744 /* ~ X -> -X - 1 */
4745 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4746 build_int_cst (exp_type, 1));
4747 SET_EXPR_LOCATION (exp, loc);
4748 continue;
4750 case PLUS_EXPR: case MINUS_EXPR:
4751 if (TREE_CODE (arg1) != INTEGER_CST)
4752 break;
4754 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4755 move a constant to the other side. */
4756 if (!TYPE_UNSIGNED (arg0_type)
4757 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4758 break;
4760 /* If EXP is signed, any overflow in the computation is undefined,
4761 so we don't worry about it so long as our computations on
4762 the bounds don't overflow. For unsigned, overflow is defined
4763 and this is exactly the right thing. */
4764 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4765 arg0_type, low, 0, arg1, 0);
4766 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4767 arg0_type, high, 1, arg1, 0);
4768 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4769 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4770 break;
4772 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4773 *strict_overflow_p = true;
4775 /* Check for an unsigned range which has wrapped around the maximum
4776 value thus making n_high < n_low, and normalize it. */
4777 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4778 {
4779 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4780 integer_one_node, 0);
4781 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4782 integer_one_node, 0);
4784 /* If the range is of the form +/- [ x+1, x ], we won't
4785 be able to normalize it. But then, it represents the
4786 whole range or the empty set, so make it
4787 +/- [ -, - ]. */
4788 if (tree_int_cst_equal (n_low, low)
4789 && tree_int_cst_equal (n_high, high))
4790 low = high = 0;
4791 else
4792 in_p = ! in_p;
4793 }
4794 else
4795 low = n_low, high = n_high;
4797 exp = arg0;
4798 continue;
4800 CASE_CONVERT: case NON_LVALUE_EXPR:
4801 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4802 break;
4804 if (! INTEGRAL_TYPE_P (arg0_type)
4805 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4806 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4807 break;
4809 n_low = low, n_high = high;
4811 if (n_low != 0)
4812 n_low = fold_convert_loc (loc, arg0_type, n_low);
4814 if (n_high != 0)
4815 n_high = fold_convert_loc (loc, arg0_type, n_high);
4818 /* If we're converting arg0 from an unsigned type to exp,
4819 a signed type, we will be doing the comparison as unsigned.
4820 The tests above have already verified that LOW and HIGH
4821 are both positive.
4823 So we have to ensure that we will handle large unsigned
4824 values the same way that the current signed bounds treat
4825 negative values. */
4827 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4828 {
4829 tree high_positive;
4830 tree equiv_type;
4831 /* For fixed-point modes, we need to pass the saturating flag
4832 as the 2nd parameter. */
4833 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4834 equiv_type = lang_hooks.types.type_for_mode
4835 (TYPE_MODE (arg0_type),
4836 TYPE_SATURATING (arg0_type));
4837 else
4838 equiv_type = lang_hooks.types.type_for_mode
4839 (TYPE_MODE (arg0_type), 1);
4841 /* A range without an upper bound is, naturally, unbounded.
4842 Since convert would have cropped a very large value, use
4843 the max value for the destination type. */
4844 high_positive
4845 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4846 : TYPE_MAX_VALUE (arg0_type);
4848 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4849 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4850 fold_convert_loc (loc, arg0_type,
4851 high_positive),
4852 build_int_cst (arg0_type, 1));
4854 /* If the low bound is specified, "and" the range with the
4855 range for which the original unsigned value will be
4856 positive. */
4857 if (low != 0)
4858 {
4859 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4860 1, n_low, n_high, 1,
4861 fold_convert_loc (loc, arg0_type,
4862 integer_zero_node),
4863 high_positive))
4864 break;
4866 in_p = (n_in_p == in_p);
4867 }
4868 else
4869 {
4870 /* Otherwise, "or" the range with the range of the input
4871 that will be interpreted as negative. */
4872 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4873 0, n_low, n_high, 1,
4874 fold_convert_loc (loc, arg0_type,
4875 integer_zero_node),
4876 high_positive))
4877 break;
4879 in_p = (in_p != n_in_p);
4880 }
4881 }
4883 exp = arg0;
4884 low = n_low, high = n_high;
4885 continue;
4887 default:
4888 break;
4889 }
4891 break;
4892 }
4894 /* If EXP is a constant, we can evaluate whether this is true or false. */
4895 if (TREE_CODE (exp) == INTEGER_CST)
4896 {
4897 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4898 exp, 0, low, 0))
4899 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4900 exp, 1, high, 1)));
4901 low = high = 0;
4902 exp = 0;
4903 }
4905 *pin_p = in_p, *plow = low, *phigh = high;
4906 return exp;
4907 }
4909 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4910 type, TYPE, return an expression to test if EXP is in (or out of, depending
4911 on IN_P) the range. Return 0 if the test couldn't be created. */
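/* Illustration: build_range_check (loc, type, x, 1, 2, 5) for a signed
   32-bit x tests "x in [2, 5]" and comes out as the single comparison
   (unsigned) (x - 2) <= 3, relying on unsigned wrap-around of the
   subtraction. */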
4913 tree
4914 build_range_check (location_t loc, tree type, tree exp, int in_p,
4915 tree low, tree high)
4916 {
4917 tree etype = TREE_TYPE (exp), value;
4919 #ifdef HAVE_canonicalize_funcptr_for_compare
4920 /* Disable this optimization for function pointer expressions
4921 on targets that require function pointer canonicalization. */
4922 if (HAVE_canonicalize_funcptr_for_compare
4923 && TREE_CODE (etype) == POINTER_TYPE
4924 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4925 return NULL_TREE;
4926 #endif
4928 if (! in_p)
4929 {
4930 value = build_range_check (loc, type, exp, 1, low, high);
4931 if (value != 0)
4932 return invert_truthvalue_loc (loc, value);
4934 return 0;
4935 }
4937 if (low == 0 && high == 0)
4938 return build_int_cst (type, 1);
4940 if (low == 0)
4941 return fold_build2_loc (loc, LE_EXPR, type, exp,
4942 fold_convert_loc (loc, etype, high));
4944 if (high == 0)
4945 return fold_build2_loc (loc, GE_EXPR, type, exp,
4946 fold_convert_loc (loc, etype, low));
4948 if (operand_equal_p (low, high, 0))
4949 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4950 fold_convert_loc (loc, etype, low));
4952 if (integer_zerop (low))
4953 {
4954 if (! TYPE_UNSIGNED (etype))
4955 {
4956 etype = unsigned_type_for (etype);
4957 high = fold_convert_loc (loc, etype, high);
4958 exp = fold_convert_loc (loc, etype, exp);
4959 }
4960 return build_range_check (loc, type, exp, 1, 0, high);
4961 }
4963 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4964 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4965 {
4966 unsigned HOST_WIDE_INT lo;
4967 HOST_WIDE_INT hi;
4968 int prec;
4970 prec = TYPE_PRECISION (etype);
4971 if (prec <= HOST_BITS_PER_WIDE_INT)
4972 {
4973 hi = 0;
4974 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4975 }
4976 else
4977 {
4978 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4979 lo = (unsigned HOST_WIDE_INT) -1;
4980 }
4982 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4983 {
4984 if (TYPE_UNSIGNED (etype))
4985 {
4986 tree signed_etype = signed_type_for (etype);
4987 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4988 etype
4989 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4990 else
4991 etype = signed_etype;
4992 exp = fold_convert_loc (loc, etype, exp);
4993 }
4994 return fold_build2_loc (loc, GT_EXPR, type, exp,
4995 build_int_cst (etype, 0));
4996 }
4997 }
4999 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5000 This requires wrap-around arithmetic for the type of the expression.
5001 First make sure that arithmetic in this type is valid, then make sure
5002 that it wraps around. */
5003 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5004 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
5005 TYPE_UNSIGNED (etype));
5007 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
5008 {
5009 tree utype, minv, maxv;
5011 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5012 for the type in question, as we rely on this here. */
5013 utype = unsigned_type_for (etype);
5014 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
5015 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5016 integer_one_node, 1);
5017 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
5019 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5020 minv, 1, maxv, 1)))
5021 etype = utype;
5022 else
5023 return 0;
5024 }
5026 high = fold_convert_loc (loc, etype, high);
5027 low = fold_convert_loc (loc, etype, low);
5028 exp = fold_convert_loc (loc, etype, exp);
5030 value = const_binop (MINUS_EXPR, high, low, 0);
5033 if (POINTER_TYPE_P (etype))
5034 {
5035 if (value != 0 && !TREE_OVERFLOW (value))
5036 {
5037 low = fold_convert_loc (loc, sizetype, low);
5038 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
5039 return build_range_check (loc, type,
5040 fold_build2_loc (loc, POINTER_PLUS_EXPR,
5041 etype, exp, low),
5042 1, build_int_cst (etype, 0), value);
5043 }
5044 return 0;
5045 }
5047 if (value != 0 && !TREE_OVERFLOW (value))
5048 return build_range_check (loc, type,
5049 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5050 1, build_int_cst (etype, 0), value);
5052 return 0;
5053 }
5055 /* Return the predecessor of VAL in its type, handling the infinite case. */
5057 static tree
5058 range_predecessor (tree val)
5060 tree type = TREE_TYPE (val);
5062 if (INTEGRAL_TYPE_P (type)
5063 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5064 return 0;
5065 else
5066 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5069 /* Return the successor of VAL in its type, handling the infinite case. */
5071 static tree
5072 range_successor (tree val)
5074 tree type = TREE_TYPE (val);
5076 if (INTEGRAL_TYPE_P (type)
5077 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5078 return 0;
5079 else
5080 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5083 /* Given two ranges, see if we can merge them into one. Return 1 if we
5084 can, 0 if we can't. Set the output range into the specified parameters. */
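/* Example: merging + [2, 5] with + [4, 9] (both "in") gives + [4, 5];
   the ranges overlap and neither contains the other, so the result
   runs from the start of the later-starting range to the end of the
   earlier-starting one. */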
5086 bool
5087 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5088 tree high0, int in1_p, tree low1, tree high1)
5090 int no_overlap;
5091 int subset;
5092 int temp;
5093 tree tem;
5094 int in_p;
5095 tree low, high;
5096 int lowequal = ((low0 == 0 && low1 == 0)
5097 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5098 low0, 0, low1, 0)));
5099 int highequal = ((high0 == 0 && high1 == 0)
5100 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5101 high0, 1, high1, 1)));
5103 /* Make range 0 be the range that starts first, or ends last if they
5104 start at the same value. Swap them if it isn't. */
5105 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5106 low0, 0, low1, 0))
5107 || (lowequal
5108 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5109 high1, 1, high0, 1))))
5110 {
5111 temp = in0_p, in0_p = in1_p, in1_p = temp;
5112 tem = low0, low0 = low1, low1 = tem;
5113 tem = high0, high0 = high1, high1 = tem;
5114 }
5116 /* Now flag two cases, whether the ranges are disjoint or whether the
5117 second range is totally subsumed in the first. Note that the tests
5118 below are simplified by the ones above. */
5119 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5120 high0, 1, low1, 0));
5121 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5122 high1, 1, high0, 1));
5124 /* We now have four cases, depending on whether we are including or
5125 excluding the two ranges. */
5126 if (in0_p && in1_p)
5128 /* If they don't overlap, the result is false. If the second range
5129 is a subset it is the result. Otherwise, the range is from the start
5130 of the second to the end of the first. */
5131 if (no_overlap)
5132 in_p = 0, low = high = 0;
5133 else if (subset)
5134 in_p = 1, low = low1, high = high1;
5135 else
5136 in_p = 1, low = low1, high = high0;
5139 else if (in0_p && ! in1_p)
5141 /* If they don't overlap, the result is the first range. If they are
5142 equal, the result is false. If the second range is a subset of the
5143 first, and the ranges begin at the same place, we go from just after
5144 the end of the second range to the end of the first. If the second
5145 range is not a subset of the first, or if it is a subset and both
5146 ranges end at the same place, the range starts at the start of the
5147 first range and ends just before the second range.
5148 Otherwise, we can't describe this as a single range. */
5149 if (no_overlap)
5150 in_p = 1, low = low0, high = high0;
5151 else if (lowequal && highequal)
5152 in_p = 0, low = high = 0;
5153 else if (subset && lowequal)
5155 low = range_successor (high1);
5156 high = high0;
5157 in_p = 1;
5158 if (low == 0)
5160 /* We are in the weird situation where high0 > high1 but
5161 high1 has no successor. Punt. */
5162 return 0;
5165 else if (! subset || highequal)
5167 low = low0;
5168 high = range_predecessor (low1);
5169 in_p = 1;
5170 if (high == 0)
5172 /* low0 < low1 but low1 has no predecessor. Punt. */
5173 return 0;
5176 else
5177 return 0;
5180 else if (! in0_p && in1_p)
5182 /* If they don't overlap, the result is the second range. If the second
5183 is a subset of the first, the result is false. Otherwise,
5184 the range starts just after the first range and ends at the
5185 end of the second. */
5186 if (no_overlap)
5187 in_p = 1, low = low1, high = high1;
5188 else if (subset || highequal)
5189 in_p = 0, low = high = 0;
5190 else
5192 low = range_successor (high0);
5193 high = high1;
5194 in_p = 1;
5195 if (low == 0)
5197 /* high1 > high0 but high0 has no successor. Punt. */
5198 return 0;
5203 else
5205 /* The case where we are excluding both ranges. Here the complex case
5206 is if they don't overlap. In that case, the only time we have a
5207 range is if they are adjacent. If the second is a subset of the
5208 first, the result is the first. Otherwise, the range to exclude
5209 starts at the beginning of the first range and ends at the end of the
5210 second. */
5211 if (no_overlap)
5213 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5214 range_successor (high0),
5215 1, low1, 0)))
5216 in_p = 0, low = low0, high = high1;
5217 else
5219 /* Canonicalize - [min, x] into - [-, x]. */
5220 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5221 switch (TREE_CODE (TREE_TYPE (low0)))
5223 case ENUMERAL_TYPE:
5224 if (TYPE_PRECISION (TREE_TYPE (low0))
5225 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5226 break;
5227 /* FALLTHROUGH */
5228 case INTEGER_TYPE:
5229 if (tree_int_cst_equal (low0,
5230 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5231 low0 = 0;
5232 break;
5233 case POINTER_TYPE:
5234 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5235 && integer_zerop (low0))
5236 low0 = 0;
5237 break;
5238 default:
5239 break;
5242 /* Canonicalize - [x, max] into - [x, -]. */
5243 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5244 switch (TREE_CODE (TREE_TYPE (high1)))
5246 case ENUMERAL_TYPE:
5247 if (TYPE_PRECISION (TREE_TYPE (high1))
5248 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5249 break;
5250 /* FALLTHROUGH */
5251 case INTEGER_TYPE:
5252 if (tree_int_cst_equal (high1,
5253 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5254 high1 = 0;
5255 break;
5256 case POINTER_TYPE:
5257 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5258 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5259 high1, 1,
5260 integer_one_node, 1)))
5261 high1 = 0;
5262 break;
5263 default:
5264 break;
5267 /* The ranges might be also adjacent between the maximum and
5268 minimum values of the given type. For
5269 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5270 return + [x + 1, y - 1]. */
5271 if (low0 == 0 && high1 == 0)
5273 low = range_successor (high0);
5274 high = range_predecessor (low1);
5275 if (low == 0 || high == 0)
5276 return 0;
5278 in_p = 1;
5280 else
5281 return 0;
5284 else if (subset)
5285 in_p = 0, low = low0, high = high0;
5286 else
5287 in_p = 0, low = low0, high = high1;
5290 *pin_p = in_p, *plow = low, *phigh = high;
5291 return 1;
5295 /* Subroutine of fold, looking inside expressions of the form
5296 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5297 of the COND_EXPR. This function is being used also to optimize
5298 A op B ? C : A, by reversing the comparison first.
5300 Return a folded expression whose code is not a COND_EXPR
5301 anymore, or NULL_TREE if no folding opportunity is found. */
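/* Typical results of the cases below: "x >= 0 ? x : -x" folds to
   ABS_EXPR <x>, "a < b ? a : b" folds to MIN_EXPR <a, b> when NaNs
   need not be honored, and "x == 0 ? x : 0" folds to plain 0 when
   signed zeros do not matter. */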
5303 static tree
5304 fold_cond_expr_with_comparison (location_t loc, tree type,
5305 tree arg0, tree arg1, tree arg2)
5307 enum tree_code comp_code = TREE_CODE (arg0);
5308 tree arg00 = TREE_OPERAND (arg0, 0);
5309 tree arg01 = TREE_OPERAND (arg0, 1);
5310 tree arg1_type = TREE_TYPE (arg1);
5311 tree tem;
5313 STRIP_NOPS (arg1);
5314 STRIP_NOPS (arg2);
5316 /* If we have A op 0 ? A : -A, consider applying the following
5317 transformations:
5319 A == 0? A : -A same as -A
5320 A != 0? A : -A same as A
5321 A >= 0? A : -A same as abs (A)
5322 A > 0? A : -A same as abs (A)
5323 A <= 0? A : -A same as -abs (A)
5324 A < 0? A : -A same as -abs (A)
5326 None of these transformations work for modes with signed
5327 zeros. If A is +/-0, the first two transformations will
5328 change the sign of the result (from +0 to -0, or vice
5329 versa). The last four will fix the sign of the result,
5330 even though the original expressions could be positive or
5331 negative, depending on the sign of A.
5333 Note that all these transformations are correct if A is
5334 NaN, since the two alternatives (A and -A) are also NaNs. */
5335 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5336 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5337 ? real_zerop (arg01)
5338 : integer_zerop (arg01))
5339 && ((TREE_CODE (arg2) == NEGATE_EXPR
5340 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5341 /* In the case that A is of the form X-Y, '-A' (arg2) may
5342 have already been folded to Y-X, check for that. */
5343 || (TREE_CODE (arg1) == MINUS_EXPR
5344 && TREE_CODE (arg2) == MINUS_EXPR
5345 && operand_equal_p (TREE_OPERAND (arg1, 0),
5346 TREE_OPERAND (arg2, 1), 0)
5347 && operand_equal_p (TREE_OPERAND (arg1, 1),
5348 TREE_OPERAND (arg2, 0), 0))))
5349 switch (comp_code)
5351 case EQ_EXPR:
5352 case UNEQ_EXPR:
5353 tem = fold_convert_loc (loc, arg1_type, arg1);
5354 return pedantic_non_lvalue_loc (loc,
5355 fold_convert_loc (loc, type,
5356 negate_expr (tem)));
5357 case NE_EXPR:
5358 case LTGT_EXPR:
5359 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5360 case UNGE_EXPR:
5361 case UNGT_EXPR:
5362 if (flag_trapping_math)
5363 break;
5364 /* Fall through. */
5365 case GE_EXPR:
5366 case GT_EXPR:
5367 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5368 arg1 = fold_convert_loc (loc, signed_type_for
5369 (TREE_TYPE (arg1)), arg1);
5370 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5371 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5372 case UNLE_EXPR:
5373 case UNLT_EXPR:
5374 if (flag_trapping_math)
5375 break;
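/* Fall through. */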
5376 case LE_EXPR:
5377 case LT_EXPR:
5378 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5379 arg1 = fold_convert_loc (loc, signed_type_for
5380 (TREE_TYPE (arg1)), arg1);
5381 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5382 return negate_expr (fold_convert_loc (loc, type, tem));
5383 default:
5384 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5385 break;
5388 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5389 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5390 both transformations are correct when A is NaN: A != 0
5391 is then true, and A == 0 is false. */
5393 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5394 && integer_zerop (arg01) && integer_zerop (arg2))
5396 if (comp_code == NE_EXPR)
5397 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5398 else if (comp_code == EQ_EXPR)
5399 return build_int_cst (type, 0);
5402 /* Try some transformations of A op B ? A : B.
5404 A == B? A : B same as B
5405 A != B? A : B same as A
5406 A >= B? A : B same as max (A, B)
5407 A > B? A : B same as max (B, A)
5408 A <= B? A : B same as min (A, B)
5409 A < B? A : B same as min (B, A)
5411 As above, these transformations don't work in the presence
5412 of signed zeros. For example, if A and B are zeros of
5413 opposite sign, the first two transformations will change
5414 the sign of the result. In the last four, the original
5415 expressions give different results for (A=+0, B=-0) and
5416 (A=-0, B=+0), but the transformed expressions do not.
5418 The first two transformations are correct if either A or B
5419 is a NaN. In the first transformation, the condition will
5420 be false, and B will indeed be chosen. In the case of the
5421 second transformation, the condition A != B will be true,
5422 and A will be chosen.
5424 The conversions to max() and min() are not correct if B is
5425 a number and A is not. The conditions in the original
5426 expressions will be false, so all four give B. The min()
5427 and max() versions would give a NaN instead. */
5428 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5429 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5430 /* Avoid these transformations if the COND_EXPR may be used
5431 as an lvalue in the C++ front-end. PR c++/19199. */
5432 && (in_gimple_form
5433 || (strcmp (lang_hooks.name, "GNU C++") != 0
5434 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5435 || ! maybe_lvalue_p (arg1)
5436 || ! maybe_lvalue_p (arg2)))
5438 tree comp_op0 = arg00;
5439 tree comp_op1 = arg01;
5440 tree comp_type = TREE_TYPE (comp_op0);
5442 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5443 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5445 comp_type = type;
5446 comp_op0 = arg1;
5447 comp_op1 = arg2;
5450 switch (comp_code)
5452 case EQ_EXPR:
5453 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5454 case NE_EXPR:
5455 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5456 case LE_EXPR:
5457 case LT_EXPR:
5458 case UNLE_EXPR:
5459 case UNLT_EXPR:
5460 /* In C++ a ?: expression can be an lvalue, so put the
5461 operand which will be used if they are equal first
5462 so that we can convert this back to the
5463 corresponding COND_EXPR. */
5464 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5466 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5467 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5468 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5469 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5470 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5471 comp_op1, comp_op0);
5472 return pedantic_non_lvalue_loc (loc,
5473 fold_convert_loc (loc, type, tem));
5475 break;
5476 case GE_EXPR:
5477 case GT_EXPR:
5478 case UNGE_EXPR:
5479 case UNGT_EXPR:
5480 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5482 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5483 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5484 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5485 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5486 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5487 comp_op1, comp_op0);
5488 return pedantic_non_lvalue_loc (loc,
5489 fold_convert_loc (loc, type, tem));
5491 break;
5492 case UNEQ_EXPR:
5493 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5494 return pedantic_non_lvalue_loc (loc,
5495 fold_convert_loc (loc, type, arg2));
5496 break;
5497 case LTGT_EXPR:
5498 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5499 return pedantic_non_lvalue_loc (loc,
5500 fold_convert_loc (loc, type, arg1));
5501 break;
5502 default:
5503 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5504 break;
5508 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5509 we might still be able to simplify this. For example,
5510 if C1 is one less or one more than C2, this might have started
5511 out as a MIN or MAX and been transformed by this function.
5512 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5514 if (INTEGRAL_TYPE_P (type)
5515 && TREE_CODE (arg01) == INTEGER_CST
5516 && TREE_CODE (arg2) == INTEGER_CST)
5517 switch (comp_code)
5519 case EQ_EXPR:
5520 if (TREE_CODE (arg1) == INTEGER_CST)
5521 break;
5522 /* We can replace A with C1 in this case. */
5523 arg1 = fold_convert_loc (loc, type, arg01);
5524 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5526 case LT_EXPR:
5527 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5528 MIN_EXPR, to preserve the signedness of the comparison. */
5529 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5530 OEP_ONLY_CONST)
5531 && operand_equal_p (arg01,
5532 const_binop (PLUS_EXPR, arg2,
5533 build_int_cst (type, 1), 0),
5534 OEP_ONLY_CONST))
5536 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5537 fold_convert_loc (loc, TREE_TYPE (arg00),
5538 arg2));
5539 return pedantic_non_lvalue_loc (loc,
5540 fold_convert_loc (loc, type, tem));
5542 break;
5544 case LE_EXPR:
5545 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5546 as above. */
5547 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5548 OEP_ONLY_CONST)
5549 && operand_equal_p (arg01,
5550 const_binop (MINUS_EXPR, arg2,
5551 build_int_cst (type, 1), 0),
5552 OEP_ONLY_CONST))
5554 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5555 fold_convert_loc (loc, TREE_TYPE (arg00),
5556 arg2));
5557 return pedantic_non_lvalue_loc (loc,
5558 fold_convert_loc (loc, type, tem));
5560 break;
5562 case GT_EXPR:
5563 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5564 MAX_EXPR, to preserve the signedness of the comparison. */
5565 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5566 OEP_ONLY_CONST)
5567 && operand_equal_p (arg01,
5568 const_binop (MINUS_EXPR, arg2,
5569 build_int_cst (type, 1), 0),
5570 OEP_ONLY_CONST))
5572 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5573 fold_convert_loc (loc, TREE_TYPE (arg00),
5574 arg2));
5575 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5577 break;
5579 case GE_EXPR:
5580 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5581 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5582 OEP_ONLY_CONST)
5583 && operand_equal_p (arg01,
5584 const_binop (PLUS_EXPR, arg2,
5585 build_int_cst (type, 1), 0),
5586 OEP_ONLY_CONST))
5588 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5589 fold_convert_loc (loc, TREE_TYPE (arg00),
5590 arg2));
5591 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5593 break;
5594 case NE_EXPR:
5595 break;
5596 default:
5597 gcc_unreachable ();
5600 return NULL_TREE;
5605 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5606 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5607 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5608 false) >= 2)
5609 #endif
5611 /* EXP is some logical combination of boolean tests. See if we can
5612 merge it into some range test. Return the new tree if so. */
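/* E.g. "x >= 2 && x <= 5" arrives here as a TRUTH_ANDIF_EXPR;
   make_range turns both operands into ranges of the same x,
   merge_ranges combines them into + [2, 5], and build_range_check
   emits (unsigned) (x - 2) <= 3. */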
5614 static tree
5615 fold_range_test (location_t loc, enum tree_code code, tree type,
5616 tree op0, tree op1)
5618 int or_op = (code == TRUTH_ORIF_EXPR
5619 || code == TRUTH_OR_EXPR);
5620 int in0_p, in1_p, in_p;
5621 tree low0, low1, low, high0, high1, high;
5622 bool strict_overflow_p = false;
5623 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5624 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5625 tree tem;
5626 const char * const warnmsg = G_("assuming signed overflow does not occur "
5627 "when simplifying range test");
5629 /* If this is an OR operation, invert both sides; we will invert
5630 again at the end. */
5631 if (or_op)
5632 in0_p = ! in0_p, in1_p = ! in1_p;
5634 /* If both expressions are the same, if we can merge the ranges, and we
5635 can build the range test, return it or it inverted. If one of the
5636 ranges is always true or always false, consider it to be the same
5637 expression as the other. */
5638 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5639 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5640 in1_p, low1, high1)
5641 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5642 lhs != 0 ? lhs
5643 : rhs != 0 ? rhs : integer_zero_node,
5644 in_p, low, high))))
5646 if (strict_overflow_p)
5647 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5648 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5651 /* On machines where the branch cost is expensive, if this is a
5652 short-circuited branch and the underlying object on both sides
5653 is the same, make a non-short-circuit operation. */
5654 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5655 && lhs != 0 && rhs != 0
5656 && (code == TRUTH_ANDIF_EXPR
5657 || code == TRUTH_ORIF_EXPR)
5658 && operand_equal_p (lhs, rhs, 0))
5660 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5661 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5662 which cases we can't do this. */
5663 if (simple_operand_p (lhs))
5665 tem = build2 (code == TRUTH_ANDIF_EXPR
5666 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5667 type, op0, op1);
5668 SET_EXPR_LOCATION (tem, loc);
5669 return tem;
5672 else if (lang_hooks.decls.global_bindings_p () == 0
5673 && ! CONTAINS_PLACEHOLDER_P (lhs))
5675 tree common = save_expr (lhs);
5677 if (0 != (lhs = build_range_check (loc, type, common,
5678 or_op ? ! in0_p : in0_p,
5679 low0, high0))
5680 && (0 != (rhs = build_range_check (loc, type, common,
5681 or_op ? ! in1_p : in1_p,
5682 low1, high1))))
5684 if (strict_overflow_p)
5685 fold_overflow_warning (warnmsg,
5686 WARN_STRICT_OVERFLOW_COMPARISON);
5687 tem = build2 (code == TRUTH_ANDIF_EXPR
5688 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5689 type, lhs, rhs);
5690 SET_EXPR_LOCATION (tem, loc);
5691 return tem;
5696 return 0;
5699 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5700 bit value. Arrange things so the extra bits will be set to zero if and
5701 only if C is sign-extended to its full width. If MASK is nonzero,
5702 it is an INTEGER_CST that should be AND'ed with the extra bits. */
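/* A worked example, assuming a 32-bit mode: with P == 8 and
C == 0xffffff80 (-128 already sign-extended), the XOR below clears
the extra bits and yields 0x00000080; with C == 0x00000080, which is
not sign-extended, it sets them instead and yields 0xffffff80. */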
5704 static tree
5705 unextend (tree c, int p, int unsignedp, tree mask)
5707 tree type = TREE_TYPE (c);
5708 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5709 tree temp;
5711 if (p == modesize || unsignedp)
5712 return c;
5714 /* We work by getting just the sign bit into the low-order bit, then
5715 into the high-order bit, then sign-extend. We then XOR that value
5716 with C. */
5717 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5718 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5720 /* We must use a signed type in order to get an arithmetic right shift.
5721 However, we must also avoid introducing accidental overflows, so that
5722 a subsequent call to integer_zerop will work. Hence we must
5723 do the type conversion here. At this point, the constant is either
5724 zero or one, and the conversion to a signed type can never overflow.
5725 We could get an overflow if this conversion is done anywhere else. */
5726 if (TYPE_UNSIGNED (type))
5727 temp = fold_convert (signed_type_for (type), temp);
5729 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5730 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5731 if (mask != 0)
5732 temp = const_binop (BIT_AND_EXPR, temp,
5733 fold_convert (TREE_TYPE (c), mask),
5734 0);
5735 /* If necessary, convert the type back to match the type of C. */
5736 if (TYPE_UNSIGNED (type))
5737 temp = fold_convert (type, temp);
5739 return fold_convert (type,
5740 const_binop (BIT_XOR_EXPR, c, temp, 0));
5743 /* Find ways of folding logical expressions of LHS and RHS:
5744 Try to merge two comparisons to the same innermost item.
5745 Look for range tests like "ch >= '0' && ch <= '9'".
5746 Look for combinations of simple terms on machines with expensive branches
5747 and evaluate the RHS unconditionally.
5749 For example, if we have p->a == 2 && p->b == 4 and we can make an
5750 object large enough to span both A and B, we can do this with a comparison
5751 against the object ANDed with a mask.
5753 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5754 operations to do this with one comparison.
5756 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5757 function and the one above.
5759 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5760 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5762 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5763 two operands.
5765 We return the simplified tree or 0 if no optimization is possible. */
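/* For instance, layout permitting (the details are target-dependent),
for adjacent bit-fields a and b within one word, x.a == 2 && x.b == 4
can be folded into a single load of the containing word, one AND with
the union of the two field masks, and one comparison against the
merged constant. */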
5767 static tree
5768 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5769 tree lhs, tree rhs)
5771 /* If this is the "or" of two comparisons, we can do something if
5772 the comparisons are NE_EXPR. If this is the "and", we can do something
5773 if the comparisons are EQ_EXPR. I.e.,
5774 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5776 WANTED_CODE is this operation code. For single bit fields, we can
5777 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5778 comparison for one-bit fields. */
5780 enum tree_code wanted_code;
5781 enum tree_code lcode, rcode;
5782 tree ll_arg, lr_arg, rl_arg, rr_arg;
5783 tree ll_inner, lr_inner, rl_inner, rr_inner;
5784 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5785 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5786 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5787 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5788 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5789 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5790 enum machine_mode lnmode, rnmode;
5791 tree ll_mask, lr_mask, rl_mask, rr_mask;
5792 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5793 tree l_const, r_const;
5794 tree lntype, rntype, result;
5795 HOST_WIDE_INT first_bit, end_bit;
5796 int volatilep;
5797 tree orig_lhs = lhs, orig_rhs = rhs;
5798 enum tree_code orig_code = code;
5800 /* Start by getting the comparison codes. Fail if anything is volatile.
5801 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5802 it were surrounded with a NE_EXPR. */
5804 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5805 return 0;
5807 lcode = TREE_CODE (lhs);
5808 rcode = TREE_CODE (rhs);
5810 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5812 lhs = build2 (NE_EXPR, truth_type, lhs,
5813 build_int_cst (TREE_TYPE (lhs), 0));
5814 lcode = NE_EXPR;
5817 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5819 rhs = build2 (NE_EXPR, truth_type, rhs,
5820 build_int_cst (TREE_TYPE (rhs), 0));
5821 rcode = NE_EXPR;
5824 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5825 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5826 return 0;
5828 ll_arg = TREE_OPERAND (lhs, 0);
5829 lr_arg = TREE_OPERAND (lhs, 1);
5830 rl_arg = TREE_OPERAND (rhs, 0);
5831 rr_arg = TREE_OPERAND (rhs, 1);
5833 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5834 if (simple_operand_p (ll_arg)
5835 && simple_operand_p (lr_arg))
5837 tree result;
5838 if (operand_equal_p (ll_arg, rl_arg, 0)
5839 && operand_equal_p (lr_arg, rr_arg, 0))
5841 result = combine_comparisons (loc, code, lcode, rcode,
5842 truth_type, ll_arg, lr_arg);
5843 if (result)
5844 return result;
5846 else if (operand_equal_p (ll_arg, rr_arg, 0)
5847 && operand_equal_p (lr_arg, rl_arg, 0))
5849 result = combine_comparisons (loc, code, lcode,
5850 swap_tree_comparison (rcode),
5851 truth_type, ll_arg, lr_arg);
5852 if (result)
5853 return result;
5857 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5858 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5860 /* If the RHS can be evaluated unconditionally and its operands are
5861 simple, it wins to evaluate the RHS unconditionally on machines
5862 with expensive branches. In this case, this isn't a comparison
5863 that can be merged. Avoid doing this if the RHS is a floating-point
5864 comparison since those can trap. */
5866 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5867 false) >= 2
5868 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5869 && simple_operand_p (rl_arg)
5870 && simple_operand_p (rr_arg))
5872 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5873 if (code == TRUTH_OR_EXPR
5874 && lcode == NE_EXPR && integer_zerop (lr_arg)
5875 && rcode == NE_EXPR && integer_zerop (rr_arg)
5876 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5877 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5879 result = build2 (NE_EXPR, truth_type,
5880 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5881 ll_arg, rl_arg),
5882 build_int_cst (TREE_TYPE (ll_arg), 0));
5883 goto fold_truthop_exit;
5886 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5887 if (code == TRUTH_AND_EXPR
5888 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5889 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5890 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5891 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5893 result = build2 (EQ_EXPR, truth_type,
5894 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5895 ll_arg, rl_arg),
5896 build_int_cst (TREE_TYPE (ll_arg), 0));
5897 goto fold_truthop_exit;
5900 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5902 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5904 result = build2 (code, truth_type, lhs, rhs);
5905 goto fold_truthop_exit;
5907 return NULL_TREE;
5911 /* See if the comparisons can be merged. Then get all the parameters for
5912 each side. */
5914 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5915 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5916 return 0;
5918 volatilep = 0;
5919 ll_inner = decode_field_reference (loc, ll_arg,
5920 &ll_bitsize, &ll_bitpos, &ll_mode,
5921 &ll_unsignedp, &volatilep, &ll_mask,
5922 &ll_and_mask);
5923 lr_inner = decode_field_reference (loc, lr_arg,
5924 &lr_bitsize, &lr_bitpos, &lr_mode,
5925 &lr_unsignedp, &volatilep, &lr_mask,
5926 &lr_and_mask);
5927 rl_inner = decode_field_reference (loc, rl_arg,
5928 &rl_bitsize, &rl_bitpos, &rl_mode,
5929 &rl_unsignedp, &volatilep, &rl_mask,
5930 &rl_and_mask);
5931 rr_inner = decode_field_reference (loc, rr_arg,
5932 &rr_bitsize, &rr_bitpos, &rr_mode,
5933 &rr_unsignedp, &volatilep, &rr_mask,
5934 &rr_and_mask);
5936 /* The inner operation on the lhs of each comparison must be the
5937 same if we are to be able to do anything. Then see if we have
5938 constants. If not, the same must be true for
5939 the rhs's. */
5940 if (volatilep || ll_inner == 0 || rl_inner == 0
5941 || ! operand_equal_p (ll_inner, rl_inner, 0))
5942 return 0;
5944 if (TREE_CODE (lr_arg) == INTEGER_CST
5945 && TREE_CODE (rr_arg) == INTEGER_CST)
5946 l_const = lr_arg, r_const = rr_arg;
5947 else if (lr_inner == 0 || rr_inner == 0
5948 || ! operand_equal_p (lr_inner, rr_inner, 0))
5949 return 0;
5950 else
5951 l_const = r_const = 0;
5953 /* If either comparison code is not correct for our logical operation,
5954 fail. However, we can convert a one-bit comparison against zero into
5955 the opposite comparison against that bit being set in the field. */
5957 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5958 if (lcode != wanted_code)
5960 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5962 /* Make the left operand unsigned, since we are only interested
5963 in the value of one bit. Otherwise we are doing the wrong
5964 thing below. */
5965 ll_unsignedp = 1;
5966 l_const = ll_mask;
5968 else
5969 return 0;
5972 /* This is analogous to the code for l_const above. */
5973 if (rcode != wanted_code)
5975 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5977 rl_unsignedp = 1;
5978 r_const = rl_mask;
5980 else
5981 return 0;
5984 /* See if we can find a mode that contains both fields being compared on
5985 the left. If we can't, fail. Otherwise, update all constants and masks
5986 to be relative to a field of that size. */
5987 first_bit = MIN (ll_bitpos, rl_bitpos);
5988 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5989 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5990 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5991 volatilep);
5992 if (lnmode == VOIDmode)
5993 return 0;
5995 lnbitsize = GET_MODE_BITSIZE (lnmode);
5996 lnbitpos = first_bit & ~ (lnbitsize - 1);
5997 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5998 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6000 if (BYTES_BIG_ENDIAN)
6002 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6003 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6006 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6007 size_int (xll_bitpos), 0);
6008 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6009 size_int (xrl_bitpos), 0);
6011 if (l_const)
6013 l_const = fold_convert_loc (loc, lntype, l_const);
6014 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6015 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
6016 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6017 fold_build1_loc (loc, BIT_NOT_EXPR,
6018 lntype, ll_mask),
6019 0)))
6021 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6023 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6026 if (r_const)
6028 r_const = fold_convert_loc (loc, lntype, r_const);
6029 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6030 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
6031 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6032 fold_build1_loc (loc, BIT_NOT_EXPR,
6033 lntype, rl_mask),
6034 0)))
6036 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6038 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6042 /* If the right sides are not constant, do the same for them. Also,
6043 disallow this optimization if a size or signedness mismatch occurs
6044 between the left and right sides. */
6045 if (l_const == 0)
6047 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6048 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6049 /* Make sure the two fields on the right
6050 correspond to the left without being swapped. */
6051 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6052 return 0;
6054 first_bit = MIN (lr_bitpos, rr_bitpos);
6055 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6056 rnmode = get_best_mode (end_bit - first_bit, first_bit,
6057 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
6058 volatilep);
6059 if (rnmode == VOIDmode)
6060 return 0;
6062 rnbitsize = GET_MODE_BITSIZE (rnmode);
6063 rnbitpos = first_bit & ~ (rnbitsize - 1);
6064 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6065 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6067 if (BYTES_BIG_ENDIAN)
6069 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6070 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6073 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6074 rntype, lr_mask),
6075 size_int (xlr_bitpos), 0);
6076 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6077 rntype, rr_mask),
6078 size_int (xrr_bitpos), 0);
6080 /* Make a mask that corresponds to both fields being compared.
6081 Do this for both items being compared. If the operands are the
6082 same size and the bits being compared are in the same position
6083 then we can do this by masking both and comparing the masked
6084 results. */
6085 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6086 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6087 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6089 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6090 ll_unsignedp || rl_unsignedp);
6091 if (! all_ones_mask_p (ll_mask, lnbitsize))
6092 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6094 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6095 lr_unsignedp || rr_unsignedp);
6096 if (! all_ones_mask_p (lr_mask, rnbitsize))
6097 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6099 result = build2 (wanted_code, truth_type, lhs, rhs);
6100 goto fold_truthop_exit;
6103 /* There is still another way we can do something: If both pairs of
6104 fields being compared are adjacent, we may be able to make a wider
6105 field containing them both.
6107 Note that we still must mask the lhs/rhs expressions. Furthermore,
6108 the mask must be shifted to account for the shift done by
6109 make_bit_field_ref. */
6110 if ((ll_bitsize + ll_bitpos == rl_bitpos
6111 && lr_bitsize + lr_bitpos == rr_bitpos)
6112 || (ll_bitpos == rl_bitpos + rl_bitsize
6113 && lr_bitpos == rr_bitpos + rr_bitsize))
6115 tree type;
6117 lhs = make_bit_field_ref (loc, ll_inner, lntype,
6118 ll_bitsize + rl_bitsize,
6119 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6120 rhs = make_bit_field_ref (loc, lr_inner, rntype,
6121 lr_bitsize + rr_bitsize,
6122 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6124 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6125 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6126 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6127 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6129 /* Convert to the smaller type before masking out unwanted bits. */
6130 type = lntype;
6131 if (lntype != rntype)
6133 if (lnbitsize > rnbitsize)
6135 lhs = fold_convert_loc (loc, rntype, lhs);
6136 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6137 type = rntype;
6139 else if (lnbitsize < rnbitsize)
6141 rhs = fold_convert_loc (loc, lntype, rhs);
6142 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6143 type = lntype;
6147 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6148 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6150 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6151 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6153 result = build2 (wanted_code, truth_type, lhs, rhs);
6154 goto fold_truthop_exit;
6157 return 0;
6160 /* Handle the case of comparisons with constants. If there is something in
6161 common between the masks, those bits of the constants must be the same.
6162 If not, the condition is always false. Test for this to avoid generating
6163 incorrect code below. */
6164 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6165 if (! integer_zerop (result)
6166 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6167 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6169 if (wanted_code == NE_EXPR)
6171 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6172 return constant_boolean_node (true, truth_type);
6174 else
6176 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6177 return constant_boolean_node (false, truth_type);
6181 /* Construct the expression we will return. First get the component
6182 reference we will make. Unless the mask is all ones the width of
6183 that field, perform the mask operation. Then compare with the
6184 merged constant. */
6185 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6186 ll_unsignedp || rl_unsignedp);
6188 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6189 if (! all_ones_mask_p (ll_mask, lnbitsize))
6191 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6192 SET_EXPR_LOCATION (result, loc);
6195 result = build2 (wanted_code, truth_type, result,
6196 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
6198 fold_truthop_exit:
6199 SET_EXPR_LOCATION (result, loc);
6200 return result;
6203 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
6204 constant. */
6206 static tree
6207 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6208 tree op0, tree op1)
6210 tree arg0 = op0;
6211 enum tree_code op_code;
6212 tree comp_const;
6213 tree minmax_const;
6214 int consts_equal, consts_lt;
6215 tree inner;
6217 STRIP_SIGN_NOPS (arg0);
6219 op_code = TREE_CODE (arg0);
6220 minmax_const = TREE_OPERAND (arg0, 1);
6221 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6222 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6223 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6224 inner = TREE_OPERAND (arg0, 0);
6226 /* If something does not permit us to optimize, fail by returning NULL_TREE. */
6227 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6228 || TREE_CODE (comp_const) != INTEGER_CST
6229 || TREE_OVERFLOW (comp_const)
6230 || TREE_CODE (minmax_const) != INTEGER_CST
6231 || TREE_OVERFLOW (minmax_const))
6232 return NULL_TREE;
6234 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6235 and GT_EXPR, doing the rest with recursive calls using logical
6236 simplifications. */
6237 switch (code)
6239 case NE_EXPR: case LT_EXPR: case LE_EXPR:
6241 tree tem
6242 = optimize_minmax_comparison (loc,
6243 invert_tree_comparison (code, false),
6244 type, op0, op1);
6245 if (tem)
6246 return invert_truthvalue_loc (loc, tem);
6247 return NULL_TREE;
6250 case GE_EXPR:
6251 return
6252 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6253 optimize_minmax_comparison
6254 (loc, EQ_EXPR, type, arg0, comp_const),
6255 optimize_minmax_comparison
6256 (loc, GT_EXPR, type, arg0, comp_const));
6258 case EQ_EXPR:
6259 if (op_code == MAX_EXPR && consts_equal)
6260 /* MAX (X, 0) == 0 -> X <= 0 */
6261 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6263 else if (op_code == MAX_EXPR && consts_lt)
6264 /* MAX (X, 0) == 5 -> X == 5 */
6265 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6267 else if (op_code == MAX_EXPR)
6268 /* MAX (X, 0) == -1 -> false */
6269 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6271 else if (consts_equal)
6272 /* MIN (X, 0) == 0 -> X >= 0 */
6273 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6275 else if (consts_lt)
6276 /* MIN (X, 0) == 5 -> false */
6277 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6279 else
6280 /* MIN (X, 0) == -1 -> X == -1 */
6281 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6283 case GT_EXPR:
6284 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6285 /* MAX (X, 0) > 0 -> X > 0
6286 MAX (X, 0) > 5 -> X > 5 */
6287 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6289 else if (op_code == MAX_EXPR)
6290 /* MAX (X, 0) > -1 -> true */
6291 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6293 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6294 /* MIN (X, 0) > 0 -> false
6295 MIN (X, 0) > 5 -> false */
6296 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6298 else
6299 /* MIN (X, 0) > -1 -> X > -1 */
6300 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6302 default:
6303 return NULL_TREE;
6307 /* T is an integer expression that is being multiplied, divided, or taken a
6308 modulus (CODE says which and what kind of divide or modulus) by a
6309 constant C. See if we can eliminate that operation by folding it with
6310 other operations already in T. WIDE_TYPE, if non-null, is a type that
6311 should be used for the computation if wider than our type.
6313 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6314 (X * 2) + (Y * 4). We must, however, be assured that either the original
6315 expression would not overflow or that overflow is undefined for the type
6316 in the language in question.
6318 If we return a non-null expression, it is an equivalent form of the
6319 original computation, but need not be in the original type.
6321 We set *STRICT_OVERFLOW_P to true if the return value depends on
6322 signed overflow being undefined. Otherwise we do not change
6323 *STRICT_OVERFLOW_P. */
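/* As a further sketch of the overflow caveat: for signed X, (X * 4) / 4
simplifies to X only when signed overflow is treated as undefined
(or for sizetypes), since with wrapping arithmetic X * 4 may wrap and
the division would not recover X; when the simplification is done,
*STRICT_OVERFLOW_P records the dependence. */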
6325 static tree
6326 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6327 bool *strict_overflow_p)
6329 /* To avoid exponential search depth, refuse to allow recursion past
6330 three levels. Beyond that (1) it's highly unlikely that we'll find
6331 something interesting and (2) we've probably processed it before
6332 when we built the inner expression. */
6334 static int depth;
6335 tree ret;
6337 if (depth > 3)
6338 return NULL;
6340 depth++;
6341 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6342 depth--;
6344 return ret;
6347 static tree
6348 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6349 bool *strict_overflow_p)
6351 tree type = TREE_TYPE (t);
6352 enum tree_code tcode = TREE_CODE (t);
6353 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6354 > GET_MODE_SIZE (TYPE_MODE (type)))
6355 ? wide_type : type);
6356 tree t1, t2;
6357 int same_p = tcode == code;
6358 tree op0 = NULL_TREE, op1 = NULL_TREE;
6359 bool sub_strict_overflow_p;
6361 /* Don't deal with constants of zero here; they confuse the code below. */
6362 if (integer_zerop (c))
6363 return NULL_TREE;
6365 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6366 op0 = TREE_OPERAND (t, 0);
6368 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6369 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6371 /* Note that we need not handle conditional operations here since fold
6372 already handles those cases. So just do arithmetic here. */
6373 switch (tcode)
6375 case INTEGER_CST:
6376 /* For a constant, we can always simplify if we are a multiply
6377 or (for divide and modulus) if it is a multiple of our constant. */
6378 if (code == MULT_EXPR
6379 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6380 return const_binop (code, fold_convert (ctype, t),
6381 fold_convert (ctype, c), 0);
6382 break;
6384 CASE_CONVERT: case NON_LVALUE_EXPR:
6385 /* If op0 is an expression ... */
6386 if ((COMPARISON_CLASS_P (op0)
6387 || UNARY_CLASS_P (op0)
6388 || BINARY_CLASS_P (op0)
6389 || VL_EXP_CLASS_P (op0)
6390 || EXPRESSION_CLASS_P (op0))
6391 /* ... and has wrapping overflow, and its type is smaller
6392 than ctype, then we cannot pass through as widening. */
6393 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6394 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6395 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6396 && (TYPE_PRECISION (ctype)
6397 > TYPE_PRECISION (TREE_TYPE (op0))))
6398 /* ... or this is a truncation (t is narrower than op0),
6399 then we cannot pass through this narrowing. */
6400 || (TYPE_PRECISION (type)
6401 < TYPE_PRECISION (TREE_TYPE (op0)))
6402 /* ... or signedness changes for division or modulus,
6403 then we cannot pass through this conversion. */
6404 || (code != MULT_EXPR
6405 && (TYPE_UNSIGNED (ctype)
6406 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6407 /* ... or has undefined overflow while the converted to
6408 type has not, we cannot do the operation in the inner type
6409 as that would introduce undefined overflow. */
6410 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6411 && !TYPE_OVERFLOW_UNDEFINED (type))))
6412 break;
6414 /* Pass the constant down and see if we can make a simplification. If
6415 we can, replace this expression with the inner simplification for
6416 possible later conversion to our or some other type. */
6417 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6418 && TREE_CODE (t2) == INTEGER_CST
6419 && !TREE_OVERFLOW (t2)
6420 && (0 != (t1 = extract_muldiv (op0, t2, code,
6421 code == MULT_EXPR
6422 ? ctype : NULL_TREE,
6423 strict_overflow_p))))
6424 return t1;
6425 break;
6427 case ABS_EXPR:
6428 /* If widening the type changes it from signed to unsigned, then we
6429 must avoid building ABS_EXPR itself as unsigned. */
6430 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6432 tree cstype = (*signed_type_for) (ctype);
6433 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6434 != 0)
6436 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6437 return fold_convert (ctype, t1);
6439 break;
6441 /* If the constant is negative, we cannot simplify this. */
6442 if (tree_int_cst_sgn (c) == -1)
6443 break;
6444 /* FALLTHROUGH */
6445 case NEGATE_EXPR:
6446 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6447 != 0)
6448 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6449 break;
6451 case MIN_EXPR: case MAX_EXPR:
6452 /* If widening the type changes the signedness, then we can't perform
6453 this optimization as that changes the result. */
6454 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6455 break;
6457 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6458 sub_strict_overflow_p = false;
6459 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6460 &sub_strict_overflow_p)) != 0
6461 && (t2 = extract_muldiv (op1, c, code, wide_type,
6462 &sub_strict_overflow_p)) != 0)
6464 if (tree_int_cst_sgn (c) < 0)
6465 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6466 if (sub_strict_overflow_p)
6467 *strict_overflow_p = true;
6468 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6469 fold_convert (ctype, t2));
6471 break;
6473 case LSHIFT_EXPR: case RSHIFT_EXPR:
6474 /* If the second operand is constant, this is a multiplication
6475 or floor division, by a power of two, so we can treat it that
6476 way unless the multiplier or divisor overflows. Signed
6477 left-shift overflow is implementation-defined rather than
6478 undefined in C90, so do not convert signed left shift into
6479 multiplication. */
6480 if (TREE_CODE (op1) == INTEGER_CST
6481 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6482 /* const_binop may not detect overflow correctly,
6483 so check for it explicitly here. */
6484 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6485 && TREE_INT_CST_HIGH (op1) == 0
6486 && 0 != (t1 = fold_convert (ctype,
6487 const_binop (LSHIFT_EXPR,
6488 size_one_node,
6489 op1, 0)))
6490 && !TREE_OVERFLOW (t1))
6491 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6492 ? MULT_EXPR : FLOOR_DIV_EXPR,
6493 ctype,
6494 fold_convert (ctype, op0),
6495 t1),
6496 c, code, wide_type, strict_overflow_p);
6497 break;
6499 case PLUS_EXPR: case MINUS_EXPR:
6500 /* See if we can eliminate the operation on both sides. If we can, we
6501 can return a new PLUS or MINUS. If we can't, the only remaining
6502 cases where we can do anything are if the second operand is a
6503 constant. */
6504 sub_strict_overflow_p = false;
6505 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6506 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6507 if (t1 != 0 && t2 != 0
6508 && (code == MULT_EXPR
6509 /* If not multiplication, we can only do this if both operands
6510 are divisible by c. */
6511 || (multiple_of_p (ctype, op0, c)
6512 && multiple_of_p (ctype, op1, c))))
6514 if (sub_strict_overflow_p)
6515 *strict_overflow_p = true;
6516 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6517 fold_convert (ctype, t2));
6520 /* If this was a subtraction, negate OP1 and set it to be an addition.
6521 This simplifies the logic below. */
6522 if (tcode == MINUS_EXPR)
6524 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6525 /* If OP1 was not easily negatable, the constant may be OP0. */
6526 if (TREE_CODE (op0) == INTEGER_CST)
6528 tree tem = op0;
6529 op0 = op1;
6530 op1 = tem;
6531 tem = t1;
6532 t1 = t2;
6533 t2 = tem;
6537 if (TREE_CODE (op1) != INTEGER_CST)
6538 break;
6540 /* If either OP1 or C is negative, this optimization is not safe for
6541 some of the division and remainder types, while for others we need
6542 to change the code. */
6543 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6545 if (code == CEIL_DIV_EXPR)
6546 code = FLOOR_DIV_EXPR;
6547 else if (code == FLOOR_DIV_EXPR)
6548 code = CEIL_DIV_EXPR;
6549 else if (code != MULT_EXPR
6550 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6551 break;
6554 /* If it's a multiply or a division/modulus operation of a multiple
6555 of our constant, do the operation and verify it doesn't overflow. */
6556 if (code == MULT_EXPR
6557 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6559 op1 = const_binop (code, fold_convert (ctype, op1),
6560 fold_convert (ctype, c), 0);
6561 /* We allow the constant to overflow with wrapping semantics. */
6562 if (op1 == 0
6563 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6564 break;
6566 else
6567 break;
6569 /* If we have an unsigned type that is not a sizetype, we cannot widen
6570 the operation since it will change the result if the original
6571 computation overflowed. */
6572 if (TYPE_UNSIGNED (ctype)
6573 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6574 && ctype != type)
6575 break;
6577 /* If we were able to eliminate our operation from the first side,
6578 apply our operation to the second side and reform the PLUS. */
6579 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6580 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6582 /* The last case is if we are a multiply. In that case, we can
6583 apply the distributive law to commute the multiply and addition
6584 if the multiplication of the constants doesn't overflow. */
6585 if (code == MULT_EXPR)
6586 return fold_build2 (tcode, ctype,
6587 fold_build2 (code, ctype,
6588 fold_convert (ctype, op0),
6589 fold_convert (ctype, c)),
6590 op1);
6592 break;
6594 case MULT_EXPR:
6595 /* We have a special case here if we are doing something like
6596 (C * 8) % 4 since we know that's zero. */
6597 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6598 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6599 /* If the multiplication can overflow we cannot optimize this.
6600 ??? Until we can properly mark individual operations as
6601 not overflowing we need to treat sizetype specially here as
6602 stor-layout relies on this optimization to make
6603 DECL_FIELD_BIT_OFFSET always a constant. */
6604 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6605 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6606 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6607 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6608 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6610 *strict_overflow_p = true;
6611 return omit_one_operand (type, integer_zero_node, op0);
6614 /* ... fall through ... */
6616 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6617 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6618 /* If we can extract our operation from the LHS, do so and return a
6619 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6620 do something only if the second operand is a constant. */
6621 if (same_p
6622 && (t1 = extract_muldiv (op0, c, code, wide_type,
6623 strict_overflow_p)) != 0)
6624 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6625 fold_convert (ctype, op1));
6626 else if (tcode == MULT_EXPR && code == MULT_EXPR
6627 && (t1 = extract_muldiv (op1, c, code, wide_type,
6628 strict_overflow_p)) != 0)
6629 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6630 fold_convert (ctype, t1));
6631 else if (TREE_CODE (op1) != INTEGER_CST)
6632 return 0;
6634 /* If these are the same operation types, we can associate them
6635 assuming no overflow. */
6636 if (tcode == code
6637 && 0 != (t1 = int_const_binop (MULT_EXPR,
6638 fold_convert (ctype, op1),
6639 fold_convert (ctype, c), 1))
6640 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6641 TREE_INT_CST_HIGH (t1),
6642 (TYPE_UNSIGNED (ctype)
6643 && tcode != MULT_EXPR) ? -1 : 1,
6644 TREE_OVERFLOW (t1)))
6645 && !TREE_OVERFLOW (t1))
6646 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6648 /* If these operations "cancel" each other, we have the main
6649 optimizations of this pass, which occur when either constant is a
6650 multiple of the other, in which case we replace this with either an
6651 operation of CODE or TCODE.
6653 If we have an unsigned type that is not a sizetype, we cannot do
6654 this since it will change the result if the original computation
6655 overflowed. */
6656 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6657 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6658 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6659 || (tcode == MULT_EXPR
6660 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6661 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6662 && code != MULT_EXPR)))
6664 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6666 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6667 *strict_overflow_p = true;
6668 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6669 fold_convert (ctype,
6670 const_binop (TRUNC_DIV_EXPR,
6671 op1, c, 0)));
6673 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6675 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6676 *strict_overflow_p = true;
6677 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6678 fold_convert (ctype,
6679 const_binop (TRUNC_DIV_EXPR,
6680 c, op1, 0)));
6683 break;
6685 default:
6686 break;
6689 return 0;
6692 /* Return a node which has the indicated constant VALUE (either 0 or
6693 1), and is of the indicated TYPE. */
6695 tree
6696 constant_boolean_node (int value, tree type)
6698 if (type == integer_type_node)
6699 return value ? integer_one_node : integer_zero_node;
6700 else if (type == boolean_type_node)
6701 return value ? boolean_true_node : boolean_false_node;
6702 else
6703 return build_int_cst (type, value);
6707 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6708 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6709 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6710 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6711 COND is the first argument to CODE; otherwise (as in the example
6712 given here), it is the second argument. TYPE is the type of the
6713 original expression. Return NULL_TREE if no simplification is
6714 possible. */
6716 static tree
6717 fold_binary_op_with_conditional_arg (location_t loc,
6718 enum tree_code code,
6719 tree type, tree op0, tree op1,
6720 tree cond, tree arg, int cond_first_p)
6722 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6723 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6724 tree test, true_value, false_value;
6725 tree lhs = NULL_TREE;
6726 tree rhs = NULL_TREE;
6728 /* This transformation is only worthwhile if we don't have to wrap
6729 arg in a SAVE_EXPR, and the operation can be simplified on at least
6730 one of the branches once it's pushed inside the COND_EXPR. */
6731 if (!TREE_CONSTANT (arg))
6732 return NULL_TREE;
6734 if (TREE_CODE (cond) == COND_EXPR)
6736 test = TREE_OPERAND (cond, 0);
6737 true_value = TREE_OPERAND (cond, 1);
6738 false_value = TREE_OPERAND (cond, 2);
6739 /* If this operand throws an exception, then it does not make
6740 sense to try to perform a logical or arithmetic operation
6741 involving it. */
6742 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6743 lhs = true_value;
6744 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6745 rhs = false_value;
6747 else
6749 tree testtype = TREE_TYPE (cond);
6750 test = cond;
6751 true_value = constant_boolean_node (true, testtype);
6752 false_value = constant_boolean_node (false, testtype);
6755 arg = fold_convert_loc (loc, arg_type, arg);
6756 if (lhs == 0)
6758 true_value = fold_convert_loc (loc, cond_type, true_value);
6759 if (cond_first_p)
6760 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6761 else
6762 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6764 if (rhs == 0)
6766 false_value = fold_convert_loc (loc, cond_type, false_value);
6767 if (cond_first_p)
6768 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6769 else
6770 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6773 test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6774 return fold_convert_loc (loc, type, test);
6778 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6780 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6781 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6782 ADDEND is the same as X.
6784 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6785 and finite. The problematic cases are when X is zero, and its mode
6786 has signed zeros. In the case of rounding towards -infinity,
6787 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6788 modes, X + 0 is not the same as X because -0 + 0 is 0. */
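/* A small host-side sketch of the signed-zero hazard, illustrative
only:

double b = -0.0 + 0.0;         b is +0.0, not -0.0
fesetround (FE_DOWNWARD);      from <fenv.h>
double a = +0.0 - 0.0;         a is now -0.0, not +0.0

so neither X + 0.0 nor X - 0.0 may be folded to X unconditionally
when the sign of zero must be honored. */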
6790 bool
6791 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6793 if (!real_zerop (addend))
6794 return false;
6796 /* Don't allow the fold with -fsignaling-nans. */
6797 if (HONOR_SNANS (TYPE_MODE (type)))
6798 return false;
6800 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6801 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6802 return true;
6804 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6805 if (TREE_CODE (addend) == REAL_CST
6806 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6807 negate = !negate;
6809 /* The mode has signed zeros, and we have to honor their sign.
6810 In this situation, there is only one case we can return true for.
6811 X - 0 is the same as X unless rounding towards -infinity is
6812 supported. */
6813 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6816 /* Subroutine of fold() that checks comparisons of built-in math
6817 functions against real constants.
6819 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6820 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6821 is the type of the result and ARG0 and ARG1 are the operands of the
6822 comparison. ARG1 must be a TREE_REAL_CST.
6824 The function returns the constant folded tree if a simplification
6825 can be made, and NULL_TREE otherwise. */
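/* For example, the cases below derive that sqrt (x) > 2.0 becomes
x > 4.0 (comparing against the square of the finite bound) and that
sqrt (x) < -1.0 is always false. */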
6827 static tree
6828 fold_mathfn_compare (location_t loc,
6829 enum built_in_function fcode, enum tree_code code,
6830 tree type, tree arg0, tree arg1)
6832 REAL_VALUE_TYPE c;
6834 if (BUILTIN_SQRT_P (fcode))
6836 tree arg = CALL_EXPR_ARG (arg0, 0);
6837 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6839 c = TREE_REAL_CST (arg1);
6840 if (REAL_VALUE_NEGATIVE (c))
6842 /* sqrt(x) < y is always false, if y is negative. */
6843 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6844 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6846 /* sqrt(x) > y is always true, if y is negative and we
6847 don't care about NaNs, i.e. negative values of x. */
6848 if (code == NE_EXPR || !HONOR_NANS (mode))
6849 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6851 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6852 return fold_build2_loc (loc, GE_EXPR, type, arg,
6853 build_real (TREE_TYPE (arg), dconst0));
6855 else if (code == GT_EXPR || code == GE_EXPR)
6857 REAL_VALUE_TYPE c2;
6859 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6860 real_convert (&c2, mode, &c2);
6862 if (REAL_VALUE_ISINF (c2))
6864 /* sqrt(x) > y is x == +Inf, when y is very large. */
6865 if (HONOR_INFINITIES (mode))
6866 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6867 build_real (TREE_TYPE (arg), c2));
6869 /* sqrt(x) > y is always false, when y is very large
6870 and we don't care about infinities. */
6871 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6874 /* sqrt(x) > c is the same as x > c*c. */
6875 return fold_build2_loc (loc, code, type, arg,
6876 build_real (TREE_TYPE (arg), c2));
6878 else if (code == LT_EXPR || code == LE_EXPR)
6880 REAL_VALUE_TYPE c2;
6882 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6883 real_convert (&c2, mode, &c2);
6885 if (REAL_VALUE_ISINF (c2))
6887 /* sqrt(x) < y is always true, when y is a very large
6888 value and we don't care about NaNs or Infinities. */
6889 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6890 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6892 /* sqrt(x) < y is x != +Inf when y is very large and we
6893 don't care about NaNs. */
6894 if (! HONOR_NANS (mode))
6895 return fold_build2_loc (loc, NE_EXPR, type, arg,
6896 build_real (TREE_TYPE (arg), c2));
6898 /* sqrt(x) < y is x >= 0 when y is very large and we
6899 don't care about Infinities. */
6900 if (! HONOR_INFINITIES (mode))
6901 return fold_build2_loc (loc, GE_EXPR, type, arg,
6902 build_real (TREE_TYPE (arg), dconst0));
6904 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6905 if (lang_hooks.decls.global_bindings_p () != 0
6906 || CONTAINS_PLACEHOLDER_P (arg))
6907 return NULL_TREE;
6909 arg = save_expr (arg);
6910 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6911 fold_build2_loc (loc, GE_EXPR, type, arg,
6912 build_real (TREE_TYPE (arg),
6913 dconst0)),
6914 fold_build2_loc (loc, NE_EXPR, type, arg,
6915 build_real (TREE_TYPE (arg),
6916 c2)));
6919 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6920 if (! HONOR_NANS (mode))
6921 return fold_build2_loc (loc, code, type, arg,
6922 build_real (TREE_TYPE (arg), c2));
6924 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6925 if (lang_hooks.decls.global_bindings_p () == 0
6926 && ! CONTAINS_PLACEHOLDER_P (arg))
6928 arg = save_expr (arg);
6929 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6930 fold_build2_loc (loc, GE_EXPR, type, arg,
6931 build_real (TREE_TYPE (arg),
6932 dconst0)),
6933 fold_build2_loc (loc, code, type, arg,
6934 build_real (TREE_TYPE (arg),
6935 c2)));
6940 return NULL_TREE;
6943 /* Subroutine of fold() that optimizes comparisons against Infinities,
6944 either +Inf or -Inf.
6946 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6947 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6948 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6950 The function returns the constant folded tree if a simplification
6951 can be made, and NULL_TREE otherwise. */
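/* For example, for a double ARG0 the cases below rewrite x < +Inf as
x <= DBL_MAX and x >= +Inf as x > DBL_MAX; comparisons against -Inf
are first turned around via swap_tree_comparison. */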
6953 static tree
6954 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6955 tree arg0, tree arg1)
6957 enum machine_mode mode;
6958 REAL_VALUE_TYPE max;
6959 tree temp;
6960 bool neg;
6962 mode = TYPE_MODE (TREE_TYPE (arg0));
6964 /* For negative infinity swap the sense of the comparison. */
6965 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6966 if (neg)
6967 code = swap_tree_comparison (code);
6969 switch (code)
6971 case GT_EXPR:
6972 /* x > +Inf is always false, if we ignore sNaNs. */
6973 if (HONOR_SNANS (mode))
6974 return NULL_TREE;
6975 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6977 case LE_EXPR:
6978 /* x <= +Inf is always true, if we don't care about NaNs. */
6979 if (! HONOR_NANS (mode))
6980 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6982 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6983 if (lang_hooks.decls.global_bindings_p () == 0
6984 && ! CONTAINS_PLACEHOLDER_P (arg0))
6986 arg0 = save_expr (arg0);
6987 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6989 break;
6991 case EQ_EXPR:
6992 case GE_EXPR:
6993 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6994 real_maxval (&max, neg, mode);
6995 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6996 arg0, build_real (TREE_TYPE (arg0), max));
6998 case LT_EXPR:
6999 /* x < +Inf is always equal to x <= DBL_MAX. */
7000 real_maxval (&max, neg, mode);
7001 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7002 arg0, build_real (TREE_TYPE (arg0), max));
7004 case NE_EXPR:
7005 /* x != +Inf is always equal to !(x > DBL_MAX). */
7006 real_maxval (&max, neg, mode);
7007 if (! HONOR_NANS (mode))
7008 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7009 arg0, build_real (TREE_TYPE (arg0), max));
7011 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7012 arg0, build_real (TREE_TYPE (arg0), max));
7013 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
7015 default:
7016 break;
7019 return NULL_TREE;
7022 /* Subroutine of fold() that optimizes comparisons of a division by
7023 a nonzero integer constant against an integer constant, i.e.
7024 X/C1 op C2.
7026 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7027 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
7028 are the operands of the comparison. ARG1 must be an INTEGER_CST.
7030 The function returns the constant folded tree if a simplification
7031 can be made, and NULL_TREE otherwise. */
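/* A worked instance for unsigned X: for X / 4 == 2 the code below
computes prod = 8, tmp = 3, lo = 8 and hi = 11, so the comparison
becomes the range check 8 <= X && X <= 11. */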
7033 static tree
7034 fold_div_compare (location_t loc,
7035 enum tree_code code, tree type, tree arg0, tree arg1)
7037 tree prod, tmp, hi, lo;
7038 tree arg00 = TREE_OPERAND (arg0, 0);
7039 tree arg01 = TREE_OPERAND (arg0, 1);
7040 unsigned HOST_WIDE_INT lpart;
7041 HOST_WIDE_INT hpart;
7042 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
7043 bool neg_overflow;
7044 int overflow;
7046 /* We have to do this the hard way to detect unsigned overflow.
7047 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
7048 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
7049 TREE_INT_CST_HIGH (arg01),
7050 TREE_INT_CST_LOW (arg1),
7051 TREE_INT_CST_HIGH (arg1),
7052 &lpart, &hpart, unsigned_p);
7053 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7054 -1, overflow);
7055 neg_overflow = false;
7057 if (unsigned_p)
7059 tmp = int_const_binop (MINUS_EXPR, arg01,
7060 build_int_cst (TREE_TYPE (arg01), 1), 0);
7061 lo = prod;
7063 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
7064 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
7065 TREE_INT_CST_HIGH (prod),
7066 TREE_INT_CST_LOW (tmp),
7067 TREE_INT_CST_HIGH (tmp),
7068 &lpart, &hpart, unsigned_p);
7069 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7070 -1, overflow | TREE_OVERFLOW (prod));
7072 else if (tree_int_cst_sgn (arg01) >= 0)
7074 tmp = int_const_binop (MINUS_EXPR, arg01,
7075 build_int_cst (TREE_TYPE (arg01), 1), 0);
7076 switch (tree_int_cst_sgn (arg1))
7078 case -1:
7079 neg_overflow = true;
7080 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7081 hi = prod;
7082 break;
7084 case 0:
7085 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
7086 hi = tmp;
7087 break;
7089 case 1:
7090 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7091 lo = prod;
7092 break;
7094 default:
7095 gcc_unreachable ();
7098 else
7100 /* A negative divisor reverses the relational operators. */
7101 code = swap_tree_comparison (code);
7103 tmp = int_const_binop (PLUS_EXPR, arg01,
7104 build_int_cst (TREE_TYPE (arg01), 1), 0);
7105 switch (tree_int_cst_sgn (arg1))
7107 case -1:
7108 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7109 lo = prod;
7110 break;
7112 case 0:
7113 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7114 lo = tmp;
7115 break;
7117 case 1:
7118 neg_overflow = true;
7119 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7120 hi = prod;
7121 break;
7123 default:
7124 gcc_unreachable ();
7128 switch (code)
7130 case EQ_EXPR:
7131 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7132 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7133 if (TREE_OVERFLOW (hi))
7134 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7135 if (TREE_OVERFLOW (lo))
7136 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7137 return build_range_check (loc, type, arg00, 1, lo, hi);
7139 case NE_EXPR:
7140 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7141 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7142 if (TREE_OVERFLOW (hi))
7143 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7144 if (TREE_OVERFLOW (lo))
7145 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7146 return build_range_check (loc, type, arg00, 0, lo, hi);
7148 case LT_EXPR:
7149 if (TREE_OVERFLOW (lo))
7151 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7152 return omit_one_operand_loc (loc, type, tmp, arg00);
7154 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7156 case LE_EXPR:
7157 if (TREE_OVERFLOW (hi))
7159 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7160 return omit_one_operand_loc (loc, type, tmp, arg00);
7162 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7164 case GT_EXPR:
7165 if (TREE_OVERFLOW (hi))
7167 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7168 return omit_one_operand_loc (loc, type, tmp, arg00);
7170 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7172 case GE_EXPR:
7173 if (TREE_OVERFLOW (lo))
7175 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7176 return omit_one_operand_loc (loc, type, tmp, arg00);
7178 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7180 default:
7181 break;
7184 return NULL_TREE;
7188 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7189 equality/inequality test, then return a simplified form of the test
7191 using a sign test. Otherwise return NULL. TYPE is the desired
7191 result type. */
7193 static tree
7194 fold_single_bit_test_into_sign_test (location_t loc,
7195 enum tree_code code, tree arg0, tree arg1,
7196 tree result_type)
7198 /* If this is testing a single bit, we can optimize the test. */
7199 if ((code == NE_EXPR || code == EQ_EXPR)
7200 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7201 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7203 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7204 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7205 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7207 if (arg00 != NULL_TREE
7208 /* This is only a win if casting to a signed type is cheap,
7209 i.e. when arg00's type is not a partial mode. */
7210 && TYPE_PRECISION (TREE_TYPE (arg00))
7211 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7213 tree stype = signed_type_for (TREE_TYPE (arg00));
7214 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7215 result_type,
7216 fold_convert_loc (loc, stype, arg00),
7217 build_int_cst (stype, 0));
7221 return NULL_TREE;
7224 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7225 equality/inequality test, then return a simplified form of
7226 the test using shifts and logical operations. Otherwise return
7227 NULL. TYPE is the desired result type. */
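/* For example, (A & 8) != 0 is rewritten below as ((A >> 3) & 1), and
(A & 8) == 0 additionally XORs the shifted bit with 1; when the mask
is the sign bit, the sign-test form above is used instead. */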
7229 tree
7230 fold_single_bit_test (location_t loc, enum tree_code code,
7231 tree arg0, tree arg1, tree result_type)
7233 /* If this is testing a single bit, we can optimize the test. */
7234 if ((code == NE_EXPR || code == EQ_EXPR)
7235 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7236 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7238 tree inner = TREE_OPERAND (arg0, 0);
7239 tree type = TREE_TYPE (arg0);
7240 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7241 enum machine_mode operand_mode = TYPE_MODE (type);
7242 int ops_unsigned;
7243 tree signed_type, unsigned_type, intermediate_type;
7244 tree tem, one;
7246 /* First, see if we can fold the single bit test into a sign-bit
7247 test. */
7248 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7249 result_type);
7250 if (tem)
7251 return tem;
7253 /* Otherwise we have (A & C) != 0 where C is a single bit,
7254 convert that into ((A >> C2) & 1), where C2 = log2(C).
7255 Similarly for (A & C) == 0. */
7257 /* If INNER is a right shift of a constant and it plus BITNUM does
7258 not overflow, adjust BITNUM and INNER. */
7259 if (TREE_CODE (inner) == RSHIFT_EXPR
7260 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7261 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7262 && bitnum < TYPE_PRECISION (type)
7263 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7264 bitnum - TYPE_PRECISION (type)))
7266 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7267 inner = TREE_OPERAND (inner, 0);
7270 /* If we are going to be able to omit the AND below, we must do our
7271 operations as unsigned. If we must use the AND, we have a choice.
7272 Normally unsigned is faster, but for some machines signed is. */
7273 #ifdef LOAD_EXTEND_OP
7274 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7275 && !flag_syntax_only) ? 0 : 1;
7276 #else
7277 ops_unsigned = 1;
7278 #endif
7280 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7281 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7282 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7283 inner = fold_convert_loc (loc, intermediate_type, inner);
7285 if (bitnum != 0)
7286 inner = build2 (RSHIFT_EXPR, intermediate_type,
7287 inner, size_int (bitnum));
7289 one = build_int_cst (intermediate_type, 1);
7291 if (code == EQ_EXPR)
7292 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7294 /* Put the AND last so it can combine with more things. */
7295 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7297 /* Make sure to return the proper type. */
7298 inner = fold_convert_loc (loc, result_type, inner);
7300 return inner;
7302 return NULL_TREE;
7305 /* Check whether we are allowed to reorder operands arg0 and arg1,
7306 such that the evaluation of arg1 occurs before arg0. */
7308 static bool
7309 reorder_operands_p (const_tree arg0, const_tree arg1)
7311 if (! flag_evaluation_order)
7312 return true;
7313 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7314 return true;
7315 return ! TREE_SIDE_EFFECTS (arg0)
7316 && ! TREE_SIDE_EFFECTS (arg1);
7319 /* Test whether it is preferable to swap two operands, ARG0 and
7320 ARG1, for example because ARG0 is an integer constant and ARG1
7321 isn't. If REORDER is true, only recommend swapping if we can
7322 evaluate the operands in reverse order. */
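/* Illustrative example: for 2 + x this predicate returns true, so a
   caller folding a commutative operation can canonicalize the tree
   to x + 2, placing the constant second.  */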
7324 bool
7325 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7327 STRIP_SIGN_NOPS (arg0);
7328 STRIP_SIGN_NOPS (arg1);
7330 if (TREE_CODE (arg1) == INTEGER_CST)
7331 return 0;
7332 if (TREE_CODE (arg0) == INTEGER_CST)
7333 return 1;
7335 if (TREE_CODE (arg1) == REAL_CST)
7336 return 0;
7337 if (TREE_CODE (arg0) == REAL_CST)
7338 return 1;
7340 if (TREE_CODE (arg1) == FIXED_CST)
7341 return 0;
7342 if (TREE_CODE (arg0) == FIXED_CST)
7343 return 1;
7345 if (TREE_CODE (arg1) == COMPLEX_CST)
7346 return 0;
7347 if (TREE_CODE (arg0) == COMPLEX_CST)
7348 return 1;
7350 if (TREE_CONSTANT (arg1))
7351 return 0;
7352 if (TREE_CONSTANT (arg0))
7353 return 1;
7355 if (optimize_function_for_size_p (cfun))
7356 return 0;
7358 if (reorder && flag_evaluation_order
7359 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7360 return 0;
7362 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7363 for commutative and comparison operators. Ensuring a canonical
7364 form allows the optimizers to find additional redundancies without
7365 having to explicitly check for both orderings. */
7366 if (TREE_CODE (arg0) == SSA_NAME
7367 && TREE_CODE (arg1) == SSA_NAME
7368 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7369 return 1;
7371 /* Put SSA_NAMEs last. */
7372 if (TREE_CODE (arg1) == SSA_NAME)
7373 return 0;
7374 if (TREE_CODE (arg0) == SSA_NAME)
7375 return 1;
7377 /* Put variables last. */
7378 if (DECL_P (arg1))
7379 return 0;
7380 if (DECL_P (arg0))
7381 return 1;
7383 return 0;
7386 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7387 ARG0 is extended to a wider type. */
7389 static tree
7390 fold_widened_comparison (location_t loc, enum tree_code code,
7391 tree type, tree arg0, tree arg1)
7393 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7394 tree arg1_unw;
7395 tree shorter_type, outer_type;
7396 tree min, max;
7397 bool above, below;
7399 if (arg0_unw == arg0)
7400 return NULL_TREE;
7401 shorter_type = TREE_TYPE (arg0_unw);
7403 #ifdef HAVE_canonicalize_funcptr_for_compare
7404 /* Disable this optimization if we're casting a function pointer
7405 type on targets that require function pointer canonicalization. */
7406 if (HAVE_canonicalize_funcptr_for_compare
7407 && TREE_CODE (shorter_type) == POINTER_TYPE
7408 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7409 return NULL_TREE;
7410 #endif
7412 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7413 return NULL_TREE;
7415 arg1_unw = get_unwidened (arg1, NULL_TREE);
7417 /* If possible, express the comparison in the shorter mode. */
7418 if ((code == EQ_EXPR || code == NE_EXPR
7419 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7420 && (TREE_TYPE (arg1_unw) == shorter_type
7421 || ((TYPE_PRECISION (shorter_type)
7422 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7423 && (TYPE_UNSIGNED (shorter_type)
7424 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7425 || (TREE_CODE (arg1_unw) == INTEGER_CST
7426 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7427 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7428 && int_fits_type_p (arg1_unw, shorter_type))))
7429 return fold_build2_loc (loc, code, type, arg0_unw,
7430 fold_convert_loc (loc, shorter_type, arg1_unw));
7432 if (TREE_CODE (arg1_unw) != INTEGER_CST
7433 || TREE_CODE (shorter_type) != INTEGER_TYPE
7434 || !int_fits_type_p (arg1_unw, shorter_type))
7435 return NULL_TREE;
7437 /* If we are comparing with an integer that does not fit into the range
7438 of the shorter type, the result is known. */
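/* Illustrative example: for unsigned char c, the comparison
   (int) c < 300 tests against a value above the range of c (0..255),
   so it is known to be true and folds to a constant.  */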
7439 outer_type = TREE_TYPE (arg1_unw);
7440 min = lower_bound_in_type (outer_type, shorter_type);
7441 max = upper_bound_in_type (outer_type, shorter_type);
7443 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7444 max, arg1_unw));
7445 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7446 arg1_unw, min));
7448 switch (code)
7450 case EQ_EXPR:
7451 if (above || below)
7452 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7453 break;
7455 case NE_EXPR:
7456 if (above || below)
7457 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7458 break;
7460 case LT_EXPR:
7461 case LE_EXPR:
7462 if (above)
7463 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7464 else if (below)
7465 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7467 case GT_EXPR:
7468 case GE_EXPR:
7469 if (above)
7470 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7471 else if (below)
7472 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7474 default:
7475 break;
7478 return NULL_TREE;
7481 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where only
7482 the signedness of ARG0 is changed. */
7484 static tree
7485 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7486 tree arg0, tree arg1)
7488 tree arg0_inner;
7489 tree inner_type, outer_type;
7491 if (!CONVERT_EXPR_P (arg0))
7492 return NULL_TREE;
7494 outer_type = TREE_TYPE (arg0);
7495 arg0_inner = TREE_OPERAND (arg0, 0);
7496 inner_type = TREE_TYPE (arg0_inner);
7498 #ifdef HAVE_canonicalize_funcptr_for_compare
7499 /* Disable this optimization if we're casting a function pointer
7500 type on targets that require function pointer canonicalization. */
7501 if (HAVE_canonicalize_funcptr_for_compare
7502 && TREE_CODE (inner_type) == POINTER_TYPE
7503 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7504 return NULL_TREE;
7505 #endif
7507 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7508 return NULL_TREE;
7510 if (TREE_CODE (arg1) != INTEGER_CST
7511 && !(CONVERT_EXPR_P (arg1)
7512 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7513 return NULL_TREE;
7515 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7516 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7517 && code != NE_EXPR
7518 && code != EQ_EXPR)
7519 return NULL_TREE;
7521 if (TREE_CODE (arg1) == INTEGER_CST)
7522 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7523 TREE_INT_CST_HIGH (arg1), 0,
7524 TREE_OVERFLOW (arg1));
7525 else
7526 arg1 = fold_convert_loc (loc, inner_type, arg1);
7528 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7531 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7532 the step of the array. Reconstructs s and delta in the case of s *
7533 delta being an integer constant (and thus already folded). ADDR is
7534 the address. MULT is the multiplicative expression. If the
7535 function succeeds, the new address expression is returned.
7536 Otherwise NULL_TREE is returned. LOC is the location of the
7537 resulting expression. */
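/* Illustrative example, assuming int a[10] with 4-byte elements:
   &a[1] p+ 4 * d is rewritten as &a[1 + d], since 4 is the step of
   the array.  */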
7539 static tree
7540 try_move_mult_to_index (location_t loc, tree addr, tree op1)
7542 tree s, delta, step;
7543 tree ref = TREE_OPERAND (addr, 0), pref;
7544 tree ret, pos;
7545 tree itype;
7546 bool mdim = false;
7548 /* Strip the nops that might be added when converting op1 to sizetype. */
7549 STRIP_NOPS (op1);
7551 /* Canonicalize op1 into a possibly non-constant delta
7552 and an INTEGER_CST s. */
7553 if (TREE_CODE (op1) == MULT_EXPR)
7555 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7557 STRIP_NOPS (arg0);
7558 STRIP_NOPS (arg1);
7560 if (TREE_CODE (arg0) == INTEGER_CST)
7562 s = arg0;
7563 delta = arg1;
7565 else if (TREE_CODE (arg1) == INTEGER_CST)
7567 s = arg1;
7568 delta = arg0;
7570 else
7571 return NULL_TREE;
7573 else if (TREE_CODE (op1) == INTEGER_CST)
7575 delta = op1;
7576 s = NULL_TREE;
7578 else
7580 /* Treat op1 as delta * 1. */
7581 delta = op1;
7582 s = integer_one_node;
7585 for (;; ref = TREE_OPERAND (ref, 0))
7587 if (TREE_CODE (ref) == ARRAY_REF)
7589 /* Remember if this was a multi-dimensional array. */
7590 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7591 mdim = true;
7593 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7594 if (! itype)
7595 continue;
7597 step = array_ref_element_size (ref);
7598 if (TREE_CODE (step) != INTEGER_CST)
7599 continue;
7601 if (s)
7603 if (! tree_int_cst_equal (step, s))
7604 continue;
7606 else
7608 /* See if delta is a multiple of step. */
7609 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7610 if (! tmp)
7611 continue;
7612 delta = tmp;
7615 /* Only fold here if we can verify we do not overflow one
7616 dimension of a multi-dimensional array. */
7617 if (mdim)
7619 tree tmp;
7621 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7622 || !INTEGRAL_TYPE_P (itype)
7623 || !TYPE_MAX_VALUE (itype)
7624 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7625 continue;
7627 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7628 fold_convert_loc (loc, itype,
7629 TREE_OPERAND (ref, 1)),
7630 fold_convert_loc (loc, itype, delta));
7631 if (!tmp
7632 || TREE_CODE (tmp) != INTEGER_CST
7633 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7634 continue;
7637 break;
7639 else
7640 mdim = false;
7642 if (!handled_component_p (ref))
7643 return NULL_TREE;
7646 /* We found a suitable array reference. So copy everything up to it,
7647 and replace the index. */
7649 pref = TREE_OPERAND (addr, 0);
7650 ret = copy_node (pref);
7651 SET_EXPR_LOCATION (ret, loc);
7652 pos = ret;
7654 while (pref != ref)
7656 pref = TREE_OPERAND (pref, 0);
7657 TREE_OPERAND (pos, 0) = copy_node (pref);
7658 pos = TREE_OPERAND (pos, 0);
7661 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7662 fold_convert_loc (loc, itype,
7663 TREE_OPERAND (pos, 1)),
7664 fold_convert_loc (loc, itype, delta));
7666 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7670 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7671 means A >= Y && A != MAX, but in this case we know that
7672 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
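/* Illustrative example: i < n && i + 1 > j is rewritten as
   i < n && i >= j; the bound i < n rules out i == MAX.  */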
7674 static tree
7675 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7677 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7679 if (TREE_CODE (bound) == LT_EXPR)
7680 a = TREE_OPERAND (bound, 0);
7681 else if (TREE_CODE (bound) == GT_EXPR)
7682 a = TREE_OPERAND (bound, 1);
7683 else
7684 return NULL_TREE;
7686 typea = TREE_TYPE (a);
7687 if (!INTEGRAL_TYPE_P (typea)
7688 && !POINTER_TYPE_P (typea))
7689 return NULL_TREE;
7691 if (TREE_CODE (ineq) == LT_EXPR)
7693 a1 = TREE_OPERAND (ineq, 1);
7694 y = TREE_OPERAND (ineq, 0);
7696 else if (TREE_CODE (ineq) == GT_EXPR)
7698 a1 = TREE_OPERAND (ineq, 0);
7699 y = TREE_OPERAND (ineq, 1);
7701 else
7702 return NULL_TREE;
7704 if (TREE_TYPE (a1) != typea)
7705 return NULL_TREE;
7707 if (POINTER_TYPE_P (typea))
7709 /* Convert the pointers to signed integers before taking the difference. */
7710 tree ta = fold_convert_loc (loc, ssizetype, a);
7711 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7712 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7714 else
7715 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7717 if (!diff || !integer_onep (diff))
7718 return NULL_TREE;
7720 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7723 /* Fold a sum or difference of at least one multiplication.
7724 Returns the folded tree or NULL if no simplification could be made. */
7726 static tree
7727 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7728 tree arg0, tree arg1)
7730 tree arg00, arg01, arg10, arg11;
7731 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7733 /* (A * C) +- (B * C) -> (A+-B) * C.
7734 (A * C) +- A -> A * (C+-1).
7735 We are most concerned about the case where C is a constant,
7736 but other combinations show up during loop reduction. Since
7737 it is not difficult, try all four possibilities. */
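/* Illustrative examples: x * 4 + y * 4 matches the first pattern and
   folds to (x + y) * 4, while x * 4 - x matches the second and folds
   to x * 3.  */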
7739 if (TREE_CODE (arg0) == MULT_EXPR)
7741 arg00 = TREE_OPERAND (arg0, 0);
7742 arg01 = TREE_OPERAND (arg0, 1);
7744 else if (TREE_CODE (arg0) == INTEGER_CST)
7746 arg00 = build_one_cst (type);
7747 arg01 = arg0;
7749 else
7751 /* We cannot generate constant 1 for fract. */
7752 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7753 return NULL_TREE;
7754 arg00 = arg0;
7755 arg01 = build_one_cst (type);
7757 if (TREE_CODE (arg1) == MULT_EXPR)
7759 arg10 = TREE_OPERAND (arg1, 0);
7760 arg11 = TREE_OPERAND (arg1, 1);
7762 else if (TREE_CODE (arg1) == INTEGER_CST)
7764 arg10 = build_one_cst (type);
7765 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7766 the purpose of this canonicalization. */
7767 if (TREE_INT_CST_HIGH (arg1) == -1
7768 && negate_expr_p (arg1)
7769 && code == PLUS_EXPR)
7771 arg11 = negate_expr (arg1);
7772 code = MINUS_EXPR;
7774 else
7775 arg11 = arg1;
7777 else
7779 /* We cannot generate constant 1 for fract. */
7780 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7781 return NULL_TREE;
7782 arg10 = arg1;
7783 arg11 = build_one_cst (type);
7785 same = NULL_TREE;
7787 if (operand_equal_p (arg01, arg11, 0))
7788 same = arg01, alt0 = arg00, alt1 = arg10;
7789 else if (operand_equal_p (arg00, arg10, 0))
7790 same = arg00, alt0 = arg01, alt1 = arg11;
7791 else if (operand_equal_p (arg00, arg11, 0))
7792 same = arg00, alt0 = arg01, alt1 = arg10;
7793 else if (operand_equal_p (arg01, arg10, 0))
7794 same = arg01, alt0 = arg00, alt1 = arg11;
7796 /* No identical multiplicands; see if we can find a common
7797 power-of-two factor in non-power-of-two multiplies. This
7798 can help in multi-dimensional array access. */
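/* Illustrative example: i * 12 + j * 4 has no identical multiplicand,
   but 4 is a power of two dividing 12, so the sum folds to
   (i * 3 + j) * 4.  */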
7799 else if (host_integerp (arg01, 0)
7800 && host_integerp (arg11, 0))
7802 HOST_WIDE_INT int01, int11, tmp;
7803 bool swap = false;
7804 tree maybe_same;
7805 int01 = TREE_INT_CST_LOW (arg01);
7806 int11 = TREE_INT_CST_LOW (arg11);
7808 /* Move min of absolute values to int11. */
7809 if ((int01 >= 0 ? int01 : -int01)
7810 < (int11 >= 0 ? int11 : -int11))
7812 tmp = int01, int01 = int11, int11 = tmp;
7813 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7814 maybe_same = arg01;
7815 swap = true;
7817 else
7818 maybe_same = arg11;
7820 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7821 /* The remainder should not be a constant, otherwise we
7822 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7823 increase the number of multiplications necessary. */
7824 && TREE_CODE (arg10) != INTEGER_CST)
7826 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7827 build_int_cst (TREE_TYPE (arg00),
7828 int01 / int11));
7829 alt1 = arg10;
7830 same = maybe_same;
7831 if (swap)
7832 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7836 if (same)
7837 return fold_build2_loc (loc, MULT_EXPR, type,
7838 fold_build2_loc (loc, code, type,
7839 fold_convert_loc (loc, type, alt0),
7840 fold_convert_loc (loc, type, alt1)),
7841 fold_convert_loc (loc, type, same));
7843 return NULL_TREE;
7846 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7847 specified by EXPR into the buffer PTR of length LEN bytes.
7848 Return the number of bytes placed in the buffer, or zero
7849 upon failure. */
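/* Illustrative example, assuming a 32-bit type on a little-endian
   target: the constant 0x01020304 is encoded as the byte sequence
   04 03 02 01; a big-endian target stores the bytes in the reverse
   order.  */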
7851 static int
7852 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7854 tree type = TREE_TYPE (expr);
7855 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7856 int byte, offset, word, words;
7857 unsigned char value;
7859 if (total_bytes > len)
7860 return 0;
7861 words = total_bytes / UNITS_PER_WORD;
7863 for (byte = 0; byte < total_bytes; byte++)
7865 int bitpos = byte * BITS_PER_UNIT;
7866 if (bitpos < HOST_BITS_PER_WIDE_INT)
7867 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7868 else
7869 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7870 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7872 if (total_bytes > UNITS_PER_WORD)
7874 word = byte / UNITS_PER_WORD;
7875 if (WORDS_BIG_ENDIAN)
7876 word = (words - 1) - word;
7877 offset = word * UNITS_PER_WORD;
7878 if (BYTES_BIG_ENDIAN)
7879 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7880 else
7881 offset += byte % UNITS_PER_WORD;
7883 else
7884 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7885 ptr[offset] = value;
7887 return total_bytes;
7891 /* Subroutine of native_encode_expr. Encode the REAL_CST
7892 specified by EXPR into the buffer PTR of length LEN bytes.
7893 Return the number of bytes placed in the buffer, or zero
7894 upon failure. */
7896 static int
7897 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7899 tree type = TREE_TYPE (expr);
7900 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7901 int byte, offset, word, words, bitpos;
7902 unsigned char value;
7904 /* There are always 32 bits in each long, no matter the size of
7905 the host's long. We handle floating point representations with
7906 up to 192 bits. */
7907 long tmp[6];
7909 if (total_bytes > len)
7910 return 0;
7911 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7913 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7915 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7916 bitpos += BITS_PER_UNIT)
7918 byte = (bitpos / BITS_PER_UNIT) & 3;
7919 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7921 if (UNITS_PER_WORD < 4)
7923 word = byte / UNITS_PER_WORD;
7924 if (WORDS_BIG_ENDIAN)
7925 word = (words - 1) - word;
7926 offset = word * UNITS_PER_WORD;
7927 if (BYTES_BIG_ENDIAN)
7928 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7929 else
7930 offset += byte % UNITS_PER_WORD;
7932 else
7933 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7934 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7936 return total_bytes;
7939 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7940 specified by EXPR into the buffer PTR of length LEN bytes.
7941 Return the number of bytes placed in the buffer, or zero
7942 upon failure. */
7944 static int
7945 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7947 int rsize, isize;
7948 tree part;
7950 part = TREE_REALPART (expr);
7951 rsize = native_encode_expr (part, ptr, len);
7952 if (rsize == 0)
7953 return 0;
7954 part = TREE_IMAGPART (expr);
7955 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7956 if (isize != rsize)
7957 return 0;
7958 return rsize + isize;
7962 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7963 specified by EXPR into the buffer PTR of length LEN bytes.
7964 Return the number of bytes placed in the buffer, or zero
7965 upon failure. */
7967 static int
7968 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7970 int i, size, offset, count;
7971 tree itype, elem, elements;
7973 offset = 0;
7974 elements = TREE_VECTOR_CST_ELTS (expr);
7975 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7976 itype = TREE_TYPE (TREE_TYPE (expr));
7977 size = GET_MODE_SIZE (TYPE_MODE (itype));
7978 for (i = 0; i < count; i++)
7980 if (elements)
7982 elem = TREE_VALUE (elements);
7983 elements = TREE_CHAIN (elements);
7985 else
7986 elem = NULL_TREE;
7988 if (elem)
7990 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7991 return 0;
7993 else
7995 if (offset + size > len)
7996 return 0;
7997 memset (ptr+offset, 0, size);
7999 offset += size;
8001 return offset;
8005 /* Subroutine of native_encode_expr. Encode the STRING_CST
8006 specified by EXPR into the buffer PTR of length LEN bytes.
8007 Return the number of bytes placed in the buffer, or zero
8008 upon failure. */
8010 static int
8011 native_encode_string (const_tree expr, unsigned char *ptr, int len)
8013 tree type = TREE_TYPE (expr);
8014 HOST_WIDE_INT total_bytes;
8016 if (TREE_CODE (type) != ARRAY_TYPE
8017 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8018 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
8019 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
8020 return 0;
8021 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
8022 if (total_bytes > len)
8023 return 0;
8024 if (TREE_STRING_LENGTH (expr) < total_bytes)
8026 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
8027 memset (ptr + TREE_STRING_LENGTH (expr), 0,
8028 total_bytes - TREE_STRING_LENGTH (expr));
8030 else
8031 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
8032 return total_bytes;
8036 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
8037 REAL_CST, COMPLEX_CST, STRING_CST or VECTOR_CST specified by EXPR into the
8038 buffer PTR of length LEN bytes. Return the number of bytes
8039 placed in the buffer, or zero upon failure. */
8041 int
8042 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
8044 switch (TREE_CODE (expr))
8046 case INTEGER_CST:
8047 return native_encode_int (expr, ptr, len);
8049 case REAL_CST:
8050 return native_encode_real (expr, ptr, len);
8052 case COMPLEX_CST:
8053 return native_encode_complex (expr, ptr, len);
8055 case VECTOR_CST:
8056 return native_encode_vector (expr, ptr, len);
8058 case STRING_CST:
8059 return native_encode_string (expr, ptr, len);
8061 default:
8062 return 0;
8067 /* Subroutine of native_interpret_expr. Interpret the contents of
8068 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8069 If the buffer cannot be interpreted, return NULL_TREE. */
8071 static tree
8072 native_interpret_int (tree type, const unsigned char *ptr, int len)
8074 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8075 int byte, offset, word, words;
8076 unsigned char value;
8077 unsigned HOST_WIDE_INT lo = 0;
8078 HOST_WIDE_INT hi = 0;
8080 if (total_bytes > len)
8081 return NULL_TREE;
8082 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
8083 return NULL_TREE;
8084 words = total_bytes / UNITS_PER_WORD;
8086 for (byte = 0; byte < total_bytes; byte++)
8088 int bitpos = byte * BITS_PER_UNIT;
8089 if (total_bytes > UNITS_PER_WORD)
8091 word = byte / UNITS_PER_WORD;
8092 if (WORDS_BIG_ENDIAN)
8093 word = (words - 1) - word;
8094 offset = word * UNITS_PER_WORD;
8095 if (BYTES_BIG_ENDIAN)
8096 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8097 else
8098 offset += byte % UNITS_PER_WORD;
8100 else
8101 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8102 value = ptr[offset];
8104 if (bitpos < HOST_BITS_PER_WIDE_INT)
8105 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
8106 else
8107 hi |= (unsigned HOST_WIDE_INT) value
8108 << (bitpos - HOST_BITS_PER_WIDE_INT);
8111 return build_int_cst_wide_type (type, lo, hi);
8115 /* Subroutine of native_interpret_expr. Interpret the contents of
8116 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8117 If the buffer cannot be interpreted, return NULL_TREE. */
8119 static tree
8120 native_interpret_real (tree type, const unsigned char *ptr, int len)
8122 enum machine_mode mode = TYPE_MODE (type);
8123 int total_bytes = GET_MODE_SIZE (mode);
8124 int byte, offset, word, words, bitpos;
8125 unsigned char value;
8126 /* There are always 32 bits in each long, no matter the size of
8127 the host's long. We handle floating point representations with
8128 up to 192 bits. */
8129 REAL_VALUE_TYPE r;
8130 long tmp[6];
8132 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8133 if (total_bytes > len || total_bytes > 24)
8134 return NULL_TREE;
8135 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8137 memset (tmp, 0, sizeof (tmp));
8138 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8139 bitpos += BITS_PER_UNIT)
8141 byte = (bitpos / BITS_PER_UNIT) & 3;
8142 if (UNITS_PER_WORD < 4)
8144 word = byte / UNITS_PER_WORD;
8145 if (WORDS_BIG_ENDIAN)
8146 word = (words - 1) - word;
8147 offset = word * UNITS_PER_WORD;
8148 if (BYTES_BIG_ENDIAN)
8149 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8150 else
8151 offset += byte % UNITS_PER_WORD;
8153 else
8154 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
8155 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8157 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8160 real_from_target (&r, tmp, mode);
8161 return build_real (type, r);
8165 /* Subroutine of native_interpret_expr. Interpret the contents of
8166 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8167 If the buffer cannot be interpreted, return NULL_TREE. */
8169 static tree
8170 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8172 tree etype, rpart, ipart;
8173 int size;
8175 etype = TREE_TYPE (type);
8176 size = GET_MODE_SIZE (TYPE_MODE (etype));
8177 if (size * 2 > len)
8178 return NULL_TREE;
8179 rpart = native_interpret_expr (etype, ptr, size);
8180 if (!rpart)
8181 return NULL_TREE;
8182 ipart = native_interpret_expr (etype, ptr+size, size);
8183 if (!ipart)
8184 return NULL_TREE;
8185 return build_complex (type, rpart, ipart);
8189 /* Subroutine of native_interpret_expr. Interpret the contents of
8190 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8191 If the buffer cannot be interpreted, return NULL_TREE. */
8193 static tree
8194 native_interpret_vector (tree type, const unsigned char *ptr, int len)
8196 tree etype, elem, elements;
8197 int i, size, count;
8199 etype = TREE_TYPE (type);
8200 size = GET_MODE_SIZE (TYPE_MODE (etype));
8201 count = TYPE_VECTOR_SUBPARTS (type);
8202 if (size * count > len)
8203 return NULL_TREE;
8205 elements = NULL_TREE;
8206 for (i = count - 1; i >= 0; i--)
8208 elem = native_interpret_expr (etype, ptr+(i*size), size);
8209 if (!elem)
8210 return NULL_TREE;
8211 elements = tree_cons (NULL_TREE, elem, elements);
8213 return build_vector (type, elements);
8217 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8218 the buffer PTR of length LEN as a constant of type TYPE. For
8219 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8220 we return a REAL_CST, etc. If the buffer cannot be interpreted,
8221 return NULL_TREE. */
8223 tree
8224 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8226 switch (TREE_CODE (type))
8228 case INTEGER_TYPE:
8229 case ENUMERAL_TYPE:
8230 case BOOLEAN_TYPE:
8231 return native_interpret_int (type, ptr, len);
8233 case REAL_TYPE:
8234 return native_interpret_real (type, ptr, len);
8236 case COMPLEX_TYPE:
8237 return native_interpret_complex (type, ptr, len);
8239 case VECTOR_TYPE:
8240 return native_interpret_vector (type, ptr, len);
8242 default:
8243 return NULL_TREE;
8248 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8249 TYPE at compile-time. If we're unable to perform the conversion
8250 return NULL_TREE. */
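/* Illustrative example, assuming IEEE single precision:
   VIEW_CONVERT_EXPR<float>(0x3f800000) folds to 1.0f, by encoding the
   integer into the buffer and reinterpreting it as a REAL_CST.  */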
8252 static tree
8253 fold_view_convert_expr (tree type, tree expr)
8255 /* We support up to 512-bit values (for V8DFmode). */
8256 unsigned char buffer[64];
8257 int len;
8259 /* Check that the host and target are sane. */
8260 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8261 return NULL_TREE;
8263 len = native_encode_expr (expr, buffer, sizeof (buffer));
8264 if (len == 0)
8265 return NULL_TREE;
8267 return native_interpret_expr (type, buffer, len);
8270 /* Build an expression for the address of T. Folds away INDIRECT_REF
8271 to avoid confusing the gimplify process. */
8273 tree
8274 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8276 /* The size of the object is not relevant when talking about its address. */
8277 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8278 t = TREE_OPERAND (t, 0);
8280 /* Note: doesn't apply to ALIGN_INDIRECT_REF. */
8281 if (TREE_CODE (t) == INDIRECT_REF
8282 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
8284 t = TREE_OPERAND (t, 0);
8286 if (TREE_TYPE (t) != ptrtype)
8288 t = build1 (NOP_EXPR, ptrtype, t);
8289 SET_EXPR_LOCATION (t, loc);
8292 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8294 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8296 if (TREE_TYPE (t) != ptrtype)
8297 t = fold_convert_loc (loc, ptrtype, t);
8299 else
8301 t = build1 (ADDR_EXPR, ptrtype, t);
8302 SET_EXPR_LOCATION (t, loc);
8305 return t;
8308 /* Build an expression for the address of T. */
8310 tree
8311 build_fold_addr_expr_loc (location_t loc, tree t)
8313 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8315 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8318 /* Fold a unary expression of code CODE and type TYPE with operand
8319 OP0. Return the folded expression if folding is successful.
8320 Otherwise, return NULL_TREE. */
8322 tree
8323 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8325 tree tem;
8326 tree arg0;
8327 enum tree_code_class kind = TREE_CODE_CLASS (code);
8329 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8330 && TREE_CODE_LENGTH (code) == 1);
8332 arg0 = op0;
8333 if (arg0)
8335 if (CONVERT_EXPR_CODE_P (code)
8336 || code == FLOAT_EXPR || code == ABS_EXPR)
8338 /* Don't use STRIP_NOPS, because signedness of argument type
8339 matters. */
8340 STRIP_SIGN_NOPS (arg0);
8342 else
8344 /* Strip any conversions that don't change the mode. This
8345 is safe for every expression, except for a comparison
8346 expression because its signedness is derived from its
8347 operands.
8349 Note that this is done as an internal manipulation within
8350 the constant folder, in order to find the simplest
8351 representation of the arguments so that their form can be
8352 studied. In any case, the appropriate type conversions
8353 should be put back in the tree that will get out of the
8354 constant folder. */
8355 STRIP_NOPS (arg0);
8359 if (TREE_CODE_CLASS (code) == tcc_unary)
8361 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8362 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8363 fold_build1_loc (loc, code, type,
8364 fold_convert_loc (loc, TREE_TYPE (op0),
8365 TREE_OPERAND (arg0, 1))));
8366 else if (TREE_CODE (arg0) == COND_EXPR)
8368 tree arg01 = TREE_OPERAND (arg0, 1);
8369 tree arg02 = TREE_OPERAND (arg0, 2);
8370 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8371 arg01 = fold_build1_loc (loc, code, type,
8372 fold_convert_loc (loc,
8373 TREE_TYPE (op0), arg01));
8374 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8375 arg02 = fold_build1_loc (loc, code, type,
8376 fold_convert_loc (loc,
8377 TREE_TYPE (op0), arg02));
8378 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8379 arg01, arg02);
8381 /* If this was a conversion, and all we did was to move into
8382 inside the COND_EXPR, bring it back out. But leave it if
8383 it is a conversion from integer to integer and the
8384 result precision is no wider than a word since such a
8385 conversion is cheap and may be optimized away by combine,
8386 while it couldn't if it were outside the COND_EXPR. Then return
8387 so we don't get into an infinite recursion loop taking the
8388 conversion out and then back in. */
8390 if ((CONVERT_EXPR_CODE_P (code)
8391 || code == NON_LVALUE_EXPR)
8392 && TREE_CODE (tem) == COND_EXPR
8393 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8394 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8395 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8396 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8397 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8398 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8399 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8400 && (INTEGRAL_TYPE_P
8401 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8402 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8403 || flag_syntax_only))
8405 tem = build1 (code, type,
8406 build3 (COND_EXPR,
8407 TREE_TYPE (TREE_OPERAND
8408 (TREE_OPERAND (tem, 1), 0)),
8409 TREE_OPERAND (tem, 0),
8410 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8411 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8412 SET_EXPR_LOCATION (tem, loc);
8414 return tem;
8416 else if (COMPARISON_CLASS_P (arg0))
8418 if (TREE_CODE (type) == BOOLEAN_TYPE)
8420 arg0 = copy_node (arg0);
8421 TREE_TYPE (arg0) = type;
8422 return arg0;
8424 else if (TREE_CODE (type) != INTEGER_TYPE)
8425 return fold_build3_loc (loc, COND_EXPR, type, arg0,
8426 fold_build1_loc (loc, code, type,
8427 integer_one_node),
8428 fold_build1_loc (loc, code, type,
8429 integer_zero_node));
8433 switch (code)
8435 case PAREN_EXPR:
8436 /* Re-association barriers around constants and other re-association
8437 barriers can be removed. */
8438 if (CONSTANT_CLASS_P (op0)
8439 || TREE_CODE (op0) == PAREN_EXPR)
8440 return fold_convert_loc (loc, type, op0);
8441 return NULL_TREE;
8443 CASE_CONVERT:
8444 case FLOAT_EXPR:
8445 case FIX_TRUNC_EXPR:
8446 if (TREE_TYPE (op0) == type)
8447 return op0;
8449 /* If we have (type) (a CMP b) and type is an integral type, return
8450 new expression involving the new type. */
8451 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8452 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8453 TREE_OPERAND (op0, 1));
8455 /* Handle cases of two conversions in a row. */
8456 if (CONVERT_EXPR_P (op0))
8458 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8459 tree inter_type = TREE_TYPE (op0);
8460 int inside_int = INTEGRAL_TYPE_P (inside_type);
8461 int inside_ptr = POINTER_TYPE_P (inside_type);
8462 int inside_float = FLOAT_TYPE_P (inside_type);
8463 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8464 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8465 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8466 int inter_int = INTEGRAL_TYPE_P (inter_type);
8467 int inter_ptr = POINTER_TYPE_P (inter_type);
8468 int inter_float = FLOAT_TYPE_P (inter_type);
8469 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8470 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8471 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8472 int final_int = INTEGRAL_TYPE_P (type);
8473 int final_ptr = POINTER_TYPE_P (type);
8474 int final_float = FLOAT_TYPE_P (type);
8475 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8476 unsigned int final_prec = TYPE_PRECISION (type);
8477 int final_unsignedp = TYPE_UNSIGNED (type);
8479 /* In addition to the cases of two conversions in a row
8480 handled below, if we are converting something to its own
8481 type via an object of identical or wider precision, neither
8482 conversion is needed. */
8483 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8484 && (((inter_int || inter_ptr) && final_int)
8485 || (inter_float && final_float))
8486 && inter_prec >= final_prec)
8487 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8489 /* Likewise, if the intermediate and initial types are either both
8490 float or both integer, we don't need the middle conversion if the
8491 former is wider than the latter and doesn't change the signedness
8492 (for integers). Avoid this if the final type is a pointer since
8493 then we sometimes need the middle conversion. Likewise if the
8494 final type has a precision not equal to the size of its mode. */
8495 if (((inter_int && inside_int)
8496 || (inter_float && inside_float)
8497 || (inter_vec && inside_vec))
8498 && inter_prec >= inside_prec
8499 && (inter_float || inter_vec
8500 || inter_unsignedp == inside_unsignedp)
8501 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8502 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8503 && ! final_ptr
8504 && (! final_vec || inter_prec == inside_prec))
8505 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8507 /* If we have a sign-extension of a zero-extended value, we can
8508 replace that by a single zero-extension. */
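/* Illustrative example: for unsigned char c, (long)(short)c first
   zero-extends c into short and then sign-extends to long; the
   intermediate value is never negative, so this equals the single
   zero-extension (long)c.  */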
8509 if (inside_int && inter_int && final_int
8510 && inside_prec < inter_prec && inter_prec < final_prec
8511 && inside_unsignedp && !inter_unsignedp)
8512 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8514 /* Two conversions in a row are not needed unless:
8515 - some conversion is floating-point (overstrict for now), or
8516 - some conversion is a vector (overstrict for now), or
8517 - the intermediate type is narrower than both initial and
8518 final, or
8519 - the intermediate type and innermost type differ in signedness,
8520 and the outermost type is wider than the intermediate, or
8521 - the initial type is a pointer type and the precisions of the
8522 intermediate and final types differ, or
8523 - the final type is a pointer type and the precisions of the
8524 initial and intermediate types differ. */
8525 if (! inside_float && ! inter_float && ! final_float
8526 && ! inside_vec && ! inter_vec && ! final_vec
8527 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8528 && ! (inside_int && inter_int
8529 && inter_unsignedp != inside_unsignedp
8530 && inter_prec < final_prec)
8531 && ((inter_unsignedp && inter_prec > inside_prec)
8532 == (final_unsignedp && final_prec > inter_prec))
8533 && ! (inside_ptr && inter_prec != final_prec)
8534 && ! (final_ptr && inside_prec != inter_prec)
8535 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8536 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8537 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8540 /* Handle (T *)&A.B.C for A being of type T and B and C
8541 living at offset zero. This occurs frequently in
8542 C++ upcasting and then accessing the base. */
8543 if (TREE_CODE (op0) == ADDR_EXPR
8544 && POINTER_TYPE_P (type)
8545 && handled_component_p (TREE_OPERAND (op0, 0)))
8547 HOST_WIDE_INT bitsize, bitpos;
8548 tree offset;
8549 enum machine_mode mode;
8550 int unsignedp, volatilep;
8551 tree base = TREE_OPERAND (op0, 0);
8552 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8553 &mode, &unsignedp, &volatilep, false);
8554 /* If the reference was to a (constant) zero offset, we can use
8555 the address of the base if it has the same base type
8556 as the result type. */
8557 if (! offset && bitpos == 0
8558 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8559 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8560 return fold_convert_loc (loc, type,
8561 build_fold_addr_expr_loc (loc, base));
8564 if (TREE_CODE (op0) == MODIFY_EXPR
8565 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8566 /* Detect assigning a bitfield. */
8567 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8568 && DECL_BIT_FIELD
8569 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8571 /* Don't leave an assignment inside a conversion
8572 unless assigning a bitfield. */
8573 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8574 /* First do the assignment, then return converted constant. */
8575 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8576 TREE_NO_WARNING (tem) = 1;
8577 TREE_USED (tem) = 1;
8578 SET_EXPR_LOCATION (tem, loc);
8579 return tem;
8582 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8583 constants (if x has signed type, the sign bit cannot be set
8584 in c). This folds extension into the BIT_AND_EXPR.
8585 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8586 very likely don't have maximal range for their precision and this
8587 transformation effectively doesn't preserve non-maximal ranges. */
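/* Illustrative example: for int x, (long)(x & 0xff) folds to
   (long)x & 0xff; the mask 0xff cannot set the sign bit of x, so
   moving the extension inside the BIT_AND_EXPR is safe.  */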
8588 if (TREE_CODE (type) == INTEGER_TYPE
8589 && TREE_CODE (op0) == BIT_AND_EXPR
8590 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8592 tree and_expr = op0;
8593 tree and0 = TREE_OPERAND (and_expr, 0);
8594 tree and1 = TREE_OPERAND (and_expr, 1);
8595 int change = 0;
8597 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8598 || (TYPE_PRECISION (type)
8599 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8600 change = 1;
8601 else if (TYPE_PRECISION (TREE_TYPE (and1))
8602 <= HOST_BITS_PER_WIDE_INT
8603 && host_integerp (and1, 1))
8605 unsigned HOST_WIDE_INT cst;
8607 cst = tree_low_cst (and1, 1);
8608 cst &= (HOST_WIDE_INT) -1
8609 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8610 change = (cst == 0);
8611 #ifdef LOAD_EXTEND_OP
8612 if (change
8613 && !flag_syntax_only
8614 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8615 == ZERO_EXTEND))
8617 tree uns = unsigned_type_for (TREE_TYPE (and0));
8618 and0 = fold_convert_loc (loc, uns, and0);
8619 and1 = fold_convert_loc (loc, uns, and1);
8621 #endif
8623 if (change)
8625 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8626 TREE_INT_CST_HIGH (and1), 0,
8627 TREE_OVERFLOW (and1));
8628 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8629 fold_convert_loc (loc, type, and0), tem);
8633 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8634 when one of the new casts will fold away. Conservatively we assume
8635 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8636 if (POINTER_TYPE_P (type)
8637 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8638 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8639 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8640 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8642 tree arg00 = TREE_OPERAND (arg0, 0);
8643 tree arg01 = TREE_OPERAND (arg0, 1);
8645 return fold_build2_loc (loc,
8646 TREE_CODE (arg0), type,
8647 fold_convert_loc (loc, type, arg00),
8648 fold_convert_loc (loc, sizetype, arg01));
8651 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8652 of the same precision, and X is an integer type not narrower than
8653 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
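/* Illustrative example: for int x, (int)~(unsigned)x folds to ~x;
   int and unsigned have the same precision, so the inner cast is not
   an extension and the complement commutes with the conversions.  */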
8654 if (INTEGRAL_TYPE_P (type)
8655 && TREE_CODE (op0) == BIT_NOT_EXPR
8656 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8657 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8658 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8660 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8661 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8662 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8663 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8664 fold_convert_loc (loc, type, tem));
8667 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8668 type of X and Y (integer types only). */
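/* Illustrative example: for int x and y, (short)(x * y) is computed
   as a multiplication in a narrower type; when short overflow does
   not wrap, the product is formed in unsigned short so that no new
   overflow is introduced.  */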
8669 if (INTEGRAL_TYPE_P (type)
8670 && TREE_CODE (op0) == MULT_EXPR
8671 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8672 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8674 /* Be careful not to introduce new overflows. */
8675 tree mult_type;
8676 if (TYPE_OVERFLOW_WRAPS (type))
8677 mult_type = type;
8678 else
8679 mult_type = unsigned_type_for (type);
8681 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8683 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8684 fold_convert_loc (loc, mult_type,
8685 TREE_OPERAND (op0, 0)),
8686 fold_convert_loc (loc, mult_type,
8687 TREE_OPERAND (op0, 1)));
8688 return fold_convert_loc (loc, type, tem);
8692 tem = fold_convert_const (code, type, op0);
8693 return tem ? tem : NULL_TREE;
8695 case ADDR_SPACE_CONVERT_EXPR:
8696 if (integer_zerop (arg0))
8697 return fold_convert_const (code, type, arg0);
8698 return NULL_TREE;
8700 case FIXED_CONVERT_EXPR:
8701 tem = fold_convert_const (code, type, arg0);
8702 return tem ? tem : NULL_TREE;
8704 case VIEW_CONVERT_EXPR:
8705 if (TREE_TYPE (op0) == type)
8706 return op0;
8707 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8708 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8709 type, TREE_OPERAND (op0, 0));
8711 /* For integral conversions with the same precision or pointer
8712 conversions use a NOP_EXPR instead. */
8713 if ((INTEGRAL_TYPE_P (type)
8714 || POINTER_TYPE_P (type))
8715 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8716 || POINTER_TYPE_P (TREE_TYPE (op0)))
8717 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8718 return fold_convert_loc (loc, type, op0);
8720 /* Strip inner integral conversions that do not change the precision. */
8721 if (CONVERT_EXPR_P (op0)
8722 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8723 || POINTER_TYPE_P (TREE_TYPE (op0)))
8724 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8725 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8726 && (TYPE_PRECISION (TREE_TYPE (op0))
8727 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8728 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8729 type, TREE_OPERAND (op0, 0));
8731 return fold_view_convert_expr (type, op0);
8733 case NEGATE_EXPR:
8734 tem = fold_negate_expr (loc, arg0);
8735 if (tem)
8736 return fold_convert_loc (loc, type, tem);
8737 return NULL_TREE;
8739 case ABS_EXPR:
8740 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8741 return fold_abs_const (arg0, type);
8742 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8743 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8744 /* Convert fabs((double)float) into (double)fabsf(float). */
8745 else if (TREE_CODE (arg0) == NOP_EXPR
8746 && TREE_CODE (type) == REAL_TYPE)
8748 tree targ0 = strip_float_extensions (arg0);
8749 if (targ0 != arg0)
8750 return fold_convert_loc (loc, type,
8751 fold_build1_loc (loc, ABS_EXPR,
8752 TREE_TYPE (targ0),
8753 targ0));
8755 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8756 else if (TREE_CODE (arg0) == ABS_EXPR)
8757 return arg0;
8758 else if (tree_expr_nonnegative_p (arg0))
8759 return arg0;
8761 /* Strip sign ops from argument. */
8762 if (TREE_CODE (type) == REAL_TYPE)
8764 tem = fold_strip_sign_ops (arg0);
8765 if (tem)
8766 return fold_build1_loc (loc, ABS_EXPR, type,
8767 fold_convert_loc (loc, type, tem));
8769 return NULL_TREE;
8771 case CONJ_EXPR:
8772 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8773 return fold_convert_loc (loc, type, arg0);
8774 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8776 tree itype = TREE_TYPE (type);
8777 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8778 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8779 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8780 negate_expr (ipart));
8782 if (TREE_CODE (arg0) == COMPLEX_CST)
8784 tree itype = TREE_TYPE (type);
8785 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8786 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8787 return build_complex (type, rpart, negate_expr (ipart));
8789 if (TREE_CODE (arg0) == CONJ_EXPR)
8790 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8791 return NULL_TREE;
8793 case BIT_NOT_EXPR:
8794 if (TREE_CODE (arg0) == INTEGER_CST)
8795 return fold_not_const (arg0, type);
8796 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8797 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8798 /* Convert ~ (-A) to A - 1. */
8799 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8800 return fold_build2_loc (loc, MINUS_EXPR, type,
8801 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8802 build_int_cst (type, 1));
8803 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8804 else if (INTEGRAL_TYPE_P (type)
8805 && ((TREE_CODE (arg0) == MINUS_EXPR
8806 && integer_onep (TREE_OPERAND (arg0, 1)))
8807 || (TREE_CODE (arg0) == PLUS_EXPR
8808 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8809 return fold_build1_loc (loc, NEGATE_EXPR, type,
8810 fold_convert_loc (loc, type,
8811 TREE_OPERAND (arg0, 0)));
8812 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8813 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8814 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8815 fold_convert_loc (loc, type,
8816 TREE_OPERAND (arg0, 0)))))
8817 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8818 fold_convert_loc (loc, type,
8819 TREE_OPERAND (arg0, 1)));
8820 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8821 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8822 fold_convert_loc (loc, type,
8823 TREE_OPERAND (arg0, 1)))))
8824 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8825 fold_convert_loc (loc, type,
8826 TREE_OPERAND (arg0, 0)), tem);
8827 /* Perform BIT_NOT_EXPR on each element individually. */
8828 else if (TREE_CODE (arg0) == VECTOR_CST)
8830 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8831 int count = TYPE_VECTOR_SUBPARTS (type), i;
8833 for (i = 0; i < count; i++)
8835 if (elements)
8837 elem = TREE_VALUE (elements);
8838 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8839 if (elem == NULL_TREE)
8840 break;
8841 elements = TREE_CHAIN (elements);
8843 else
8844 elem = build_int_cst (TREE_TYPE (type), -1);
8845 list = tree_cons (NULL_TREE, elem, list);
8847 if (i == count)
8848 return build_vector (type, nreverse (list));
8851 return NULL_TREE;
8853 case TRUTH_NOT_EXPR:
8854 /* The argument to invert_truthvalue must have Boolean type. */
8855 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8856 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8858 /* Note that the operand of this must be an int
8859 and its values must be 0 or 1.
8860 ("true" is a fixed value perhaps depending on the language,
8861 but we don't handle values other than 1 correctly yet.) */
8862 tem = fold_truth_not_expr (loc, arg0);
8863 if (!tem)
8864 return NULL_TREE;
8865 return fold_convert_loc (loc, type, tem);
8867 case REALPART_EXPR:
8868 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8869 return fold_convert_loc (loc, type, arg0);
8870 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8871 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8872 TREE_OPERAND (arg0, 1));
8873 if (TREE_CODE (arg0) == COMPLEX_CST)
8874 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8875 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8877 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8878 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8879 fold_build1_loc (loc, REALPART_EXPR, itype,
8880 TREE_OPERAND (arg0, 0)),
8881 fold_build1_loc (loc, REALPART_EXPR, itype,
8882 TREE_OPERAND (arg0, 1)));
8883 return fold_convert_loc (loc, type, tem);
8885 if (TREE_CODE (arg0) == CONJ_EXPR)
8887 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8888 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8889 TREE_OPERAND (arg0, 0));
8890 return fold_convert_loc (loc, type, tem);
8892 if (TREE_CODE (arg0) == CALL_EXPR)
8894 tree fn = get_callee_fndecl (arg0);
8895 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8896 switch (DECL_FUNCTION_CODE (fn))
8898 CASE_FLT_FN (BUILT_IN_CEXPI):
8899 fn = mathfn_built_in (type, BUILT_IN_COS);
8900 if (fn)
8901 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8902 break;
8904 default:
8905 break;
8908 return NULL_TREE;
8910 case IMAGPART_EXPR:
8911 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8912 return fold_convert_loc (loc, type, integer_zero_node);
8913 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8914 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8915 TREE_OPERAND (arg0, 0));
8916 if (TREE_CODE (arg0) == COMPLEX_CST)
8917 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8918 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8920 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8921 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8922 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8923 TREE_OPERAND (arg0, 0)),
8924 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8925 TREE_OPERAND (arg0, 1)));
8926 return fold_convert_loc (loc, type, tem);
8928 if (TREE_CODE (arg0) == CONJ_EXPR)
8930 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8931 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8932 return fold_convert_loc (loc, type, negate_expr (tem));
8934 if (TREE_CODE (arg0) == CALL_EXPR)
8936 tree fn = get_callee_fndecl (arg0);
8937 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8938 switch (DECL_FUNCTION_CODE (fn))
8940 CASE_FLT_FN (BUILT_IN_CEXPI):
8941 fn = mathfn_built_in (type, BUILT_IN_SIN);
8942 if (fn)
8943 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8944 break;
8946 default:
8947 break;
8950 return NULL_TREE;
8952 default:
8953 return NULL_TREE;
8954 } /* switch (code) */
8958 /* If the operation was a conversion, do _not_ mark a resulting constant
8959 with TREE_OVERFLOW if the original constant was not. These conversions
8960 have implementation-defined behavior and retaining the TREE_OVERFLOW
8961 flag here would confuse later passes such as VRP. */
8962 tree
8963 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8964 tree type, tree op0)
8966 tree res = fold_unary_loc (loc, code, type, op0);
8967 if (res
8968 && TREE_CODE (res) == INTEGER_CST
8969 && TREE_CODE (op0) == INTEGER_CST
8970 && CONVERT_EXPR_CODE_P (code))
8971 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8973 return res;
8976 /* Fold a binary expression of code CODE and type TYPE with operands
8977 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8978 Return the folded expression if folding is successful. Otherwise,
8979 return NULL_TREE. */
8981 static tree
8982 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8984 enum tree_code compl_code;
8986 if (code == MIN_EXPR)
8987 compl_code = MAX_EXPR;
8988 else if (code == MAX_EXPR)
8989 compl_code = MIN_EXPR;
8990 else
8991 gcc_unreachable ();
8993 /* MIN (MAX (a, b), b) == b. */
8994 if (TREE_CODE (op0) == compl_code
8995 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8996 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8998 /* MIN (MAX (b, a), b) == b. */
8999 if (TREE_CODE (op0) == compl_code
9000 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
9001 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
9002 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
9004 /* MIN (a, MAX (a, b)) == a. */
9005 if (TREE_CODE (op1) == compl_code
9006 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
9007 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
9008 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
9010 /* MIN (a, MAX (b, a)) == a. */
9011 if (TREE_CODE (op1) == compl_code
9012 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
9013 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
9014 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
9016 return NULL_TREE;
9019 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9020 by changing CODE to reduce the magnitude of constants involved in
9021 ARG0 of the comparison.
9022 Returns a canonicalized comparison tree if a simplification was
9023 possible, otherwise returns NULL_TREE.
9024 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9025 valid if signed overflow is undefined. */
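/* Illustrative example: assuming signed overflow is undefined for the
   operands, x - 10 < y is canonicalized to x - 9 <= y, reducing the
   magnitude of the constant by one.  */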
9027 static tree
9028 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9029 tree arg0, tree arg1,
9030 bool *strict_overflow_p)
9032 enum tree_code code0 = TREE_CODE (arg0);
9033 tree t, cst0 = NULL_TREE;
9034 int sgn0;
9035 bool swap = false;
9037 /* Match A +- CST code arg1 and CST code arg1. We can change the
9038 first form only if overflow is undefined. */
9039 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9040 /* In principle pointers also have undefined overflow behavior,
9041 but that causes problems elsewhere. */
9042 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9043 && (code0 == MINUS_EXPR
9044 || code0 == PLUS_EXPR)
9045 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9046 || code0 == INTEGER_CST))
9047 return NULL_TREE;
9049 /* Identify the constant in arg0 and its sign. */
9050 if (code0 == INTEGER_CST)
9051 cst0 = arg0;
9052 else
9053 cst0 = TREE_OPERAND (arg0, 1);
9054 sgn0 = tree_int_cst_sgn (cst0);
9056 /* Overflowed constants and zero will cause problems. */
9057 if (integer_zerop (cst0)
9058 || TREE_OVERFLOW (cst0))
9059 return NULL_TREE;
9061 /* See if we can reduce the magnitude of the constant in
9062 arg0 by changing the comparison code. */
9063 if (code0 == INTEGER_CST)
9065 /* CST <= arg1 -> CST-1 < arg1. */
9066 if (code == LE_EXPR && sgn0 == 1)
9067 code = LT_EXPR;
9068 /* -CST < arg1 -> -CST-1 <= arg1. */
9069 else if (code == LT_EXPR && sgn0 == -1)
9070 code = LE_EXPR;
9071 /* CST > arg1 -> CST-1 >= arg1. */
9072 else if (code == GT_EXPR && sgn0 == 1)
9073 code = GE_EXPR;
9074 /* -CST >= arg1 -> -CST-1 > arg1. */
9075 else if (code == GE_EXPR && sgn0 == -1)
9076 code = GT_EXPR;
9077 else
9078 return NULL_TREE;
9079 /* arg1 code' CST' might be more canonical. */
9080 swap = true;
9082 else
9084 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9085 if (code == LT_EXPR
9086 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9087 code = LE_EXPR;
9088 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9089 else if (code == GT_EXPR
9090 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9091 code = GE_EXPR;
9092 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9093 else if (code == LE_EXPR
9094 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9095 code = LT_EXPR;
9096 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9097 else if (code == GE_EXPR
9098 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9099 code = GT_EXPR;
9100 else
9101 return NULL_TREE;
9102 *strict_overflow_p = true;
9105 /* Now build the constant reduced in magnitude. But not if that
9106 would produce one outside of its type's range. */
9107 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9108 && ((sgn0 == 1
9109 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9110 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9111 || (sgn0 == -1
9112 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9113 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9114 /* We cannot swap the comparison here as that would cause us to
9115 endlessly recurse. */
9116 return NULL_TREE;
9118 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9119 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
9120 if (code0 != INTEGER_CST)
9121 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9123 /* If swapping might yield a more canonical form, do so. */
9124 if (swap)
9125 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
9126 else
9127 return fold_build2_loc (loc, code, type, t, arg1);
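
A standalone sketch (not part of this file) of what the canonicalization buys at the source level, assuming signed overflow is undefined and with values chosen so no overflow occurs:

#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    {
      /* CST <= arg1 -> CST-1 < arg1, then swapped to arg1 > CST-1.  */
      assert ((10 <= x) == (x > 9));
      /* A + CST > arg1 -> A + CST-1 >= arg1.  */
      assert ((x + 5 > 7) == (x + 4 >= 7));
    }
  return 0;
}
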
9130 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9131 overflow further. Try to decrease the magnitude of constants involved
9132 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9133 and putting sole constants at the second argument position.
9134 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9136 static tree
9137 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9138 tree arg0, tree arg1)
9140 tree t;
9141 bool strict_overflow_p;
9142 const char * const warnmsg = G_("assuming signed overflow does not occur "
9143 "when reducing constant in comparison");
9145 /* Try canonicalization by simplifying arg0. */
9146 strict_overflow_p = false;
9147 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9148 &strict_overflow_p);
9149 if (t)
9151 if (strict_overflow_p)
9152 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9153 return t;
9156 /* Try canonicalization by simplifying arg1 using the swapped
9157 comparison. */
9158 code = swap_tree_comparison (code);
9159 strict_overflow_p = false;
9160 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9161 &strict_overflow_p);
9162 if (t && strict_overflow_p)
9163 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9164 return t;
9167 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9168 space. This is used to avoid issuing overflow warnings for
9169 expressions like &p->x which cannot wrap. */
9171 static bool
9172 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
9174 unsigned HOST_WIDE_INT offset_low, total_low;
9175 HOST_WIDE_INT size, offset_high, total_high;
9177 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9178 return true;
9180 if (bitpos < 0)
9181 return true;
9183 if (offset == NULL_TREE)
9185 offset_low = 0;
9186 offset_high = 0;
9188 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9189 return true;
9190 else
9192 offset_low = TREE_INT_CST_LOW (offset);
9193 offset_high = TREE_INT_CST_HIGH (offset);
9196 if (add_double_with_sign (offset_low, offset_high,
9197 bitpos / BITS_PER_UNIT, 0,
9198 &total_low, &total_high,
9199 true))
9200 return true;
9202 if (total_high != 0)
9203 return true;
9205 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9206 if (size <= 0)
9207 return true;
9209 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9210 array. */
9211 if (TREE_CODE (base) == ADDR_EXPR)
9213 HOST_WIDE_INT base_size;
9215 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9216 if (base_size > 0 && size < base_size)
9217 size = base_size;
9220 return total_low > (unsigned HOST_WIDE_INT) size;
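
The effect of the final test can be sketched with concrete numbers (offset_may_wrap is a hypothetical helper; the real routine also performs the double-word addition and handles the variable-offset cases above):

#include <assert.h>
#include <stddef.h>

struct s { int x; int y; };

/* A non-negative constant byte offset can wrap only if it steps
   past the referenced object.  */
static int
offset_may_wrap (unsigned long total, long size)
{
  return size <= 0 || total > (unsigned long) size;
}

int
main (void)
{
  assert (!offset_may_wrap (offsetof (struct s, y), sizeof (struct s)));
  assert (offset_may_wrap (1024, sizeof (struct s)));
  return 0;
}
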
9223 /* Subroutine of fold_binary. This routine performs all of the
9224 transformations that are common to the equality/inequality
9225 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9226 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9227 fold_binary should use fold_binary, not this routine directly. Fold a comparison with
9228 tree code CODE and type TYPE with operands OP0 and OP1. Return
9229 the folded comparison or NULL_TREE. */
9231 static tree
9232 fold_comparison (location_t loc, enum tree_code code, tree type,
9233 tree op0, tree op1)
9235 tree arg0, arg1, tem;
9237 arg0 = op0;
9238 arg1 = op1;
9240 STRIP_SIGN_NOPS (arg0);
9241 STRIP_SIGN_NOPS (arg1);
9243 tem = fold_relational_const (code, type, arg0, arg1);
9244 if (tem != NULL_TREE)
9245 return tem;
9247 /* If one arg is a real or integer constant, put it last. */
9248 if (tree_swap_operands_p (arg0, arg1, true))
9249 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9251 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9252 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9253 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9254 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9255 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9256 && (TREE_CODE (arg1) == INTEGER_CST
9257 && !TREE_OVERFLOW (arg1)))
9259 tree const1 = TREE_OPERAND (arg0, 1);
9260 tree const2 = arg1;
9261 tree variable = TREE_OPERAND (arg0, 0);
9262 tree lhs;
9263 int lhs_add;
9264 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9266 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9267 TREE_TYPE (arg1), const2, const1);
9269 /* If the constant operation overflowed this can be
9270 simplified as a comparison against INT_MAX/INT_MIN. */
9271 if (TREE_CODE (lhs) == INTEGER_CST
9272 && TREE_OVERFLOW (lhs))
9274 int const1_sgn = tree_int_cst_sgn (const1);
9275 enum tree_code code2 = code;
9277 /* Get the sign of the constant on the lhs if the
9278 operation were VARIABLE + CONST1. */
9279 if (TREE_CODE (arg0) == MINUS_EXPR)
9280 const1_sgn = -const1_sgn;
9282 /* The sign of the constant determines if we overflowed
9283 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9284 Canonicalize to the INT_MIN overflow by swapping the comparison
9285 if necessary. */
9286 if (const1_sgn == -1)
9287 code2 = swap_tree_comparison (code);
9289 /* We now can look at the canonicalized case
9290 VARIABLE + 1 CODE2 INT_MIN
9291 and decide on the result. */
9292 if (code2 == LT_EXPR
9293 || code2 == LE_EXPR
9294 || code2 == EQ_EXPR)
9295 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9296 else if (code2 == NE_EXPR
9297 || code2 == GE_EXPR
9298 || code2 == GT_EXPR)
9299 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9302 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9303 && (TREE_CODE (lhs) != INTEGER_CST
9304 || !TREE_OVERFLOW (lhs)))
9306 fold_overflow_warning (("assuming signed overflow does not occur "
9307 "when changing X +- C1 cmp C2 to "
9308 "X cmp C1 +- C2"),
9309 WARN_STRICT_OVERFLOW_COMPARISON);
9310 return fold_build2_loc (loc, code, type, variable, lhs);
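
The transformation just performed reads naturally in source form; a small self-contained check (with values chosen so C2 -+ C1 does not overflow):

#include <assert.h>

int
main (void)
{
  for (int x = -1000; x <= 1000; x++)
    {
      /* X + 3 < 10 -> X < 10 - 3.  */
      assert ((x + 3 < 10) == (x < 7));
      /* X - 4 >= 2 -> X >= 2 + 4.  */
      assert ((x - 4 >= 2) == (x >= 6));
    }
  /* When the combined constant overflows, e.g. X - 1 < INT_MAX, the
     result is a known truth value, as handled above.  */
  return 0;
}
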
9314 /* For comparisons of pointers we can decompose them into a compile-time
9315 comparison of the base objects and the offsets into the object.
9316 This requires at least one operand being an ADDR_EXPR or a
9317 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9318 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9319 && (TREE_CODE (arg0) == ADDR_EXPR
9320 || TREE_CODE (arg1) == ADDR_EXPR
9321 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9322 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9324 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9325 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9326 enum machine_mode mode;
9327 int volatilep, unsignedp;
9328 bool indirect_base0 = false, indirect_base1 = false;
9330 /* Get base and offset for the access. Strip ADDR_EXPR for
9331 get_inner_reference, but put it back by stripping INDIRECT_REF
9332 off the base object if possible. indirect_baseN will be true
9333 if baseN is not an address but refers to the object itself. */
9334 base0 = arg0;
9335 if (TREE_CODE (arg0) == ADDR_EXPR)
9337 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9338 &bitsize, &bitpos0, &offset0, &mode,
9339 &unsignedp, &volatilep, false);
9340 if (TREE_CODE (base0) == INDIRECT_REF)
9341 base0 = TREE_OPERAND (base0, 0);
9342 else
9343 indirect_base0 = true;
9345 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9347 base0 = TREE_OPERAND (arg0, 0);
9348 offset0 = TREE_OPERAND (arg0, 1);
9351 base1 = arg1;
9352 if (TREE_CODE (arg1) == ADDR_EXPR)
9354 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9355 &bitsize, &bitpos1, &offset1, &mode,
9356 &unsignedp, &volatilep, false);
9357 if (TREE_CODE (base1) == INDIRECT_REF)
9358 base1 = TREE_OPERAND (base1, 0);
9359 else
9360 indirect_base1 = true;
9362 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9364 base1 = TREE_OPERAND (arg1, 0);
9365 offset1 = TREE_OPERAND (arg1, 1);
9368 /* If we have equivalent bases we might be able to simplify. */
9369 if (indirect_base0 == indirect_base1
9370 && operand_equal_p (base0, base1, 0))
9372 /* We can fold this expression to a constant if the non-constant
9373 offset parts are equal. */
9374 if ((offset0 == offset1
9375 || (offset0 && offset1
9376 && operand_equal_p (offset0, offset1, 0)))
9377 && (code == EQ_EXPR
9378 || code == NE_EXPR
9379 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9382 if (code != EQ_EXPR
9383 && code != NE_EXPR
9384 && bitpos0 != bitpos1
9385 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9386 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9387 fold_overflow_warning (("assuming pointer wraparound does not "
9388 "occur when comparing P +- C1 with "
9389 "P +- C2"),
9390 WARN_STRICT_OVERFLOW_CONDITIONAL);
9392 switch (code)
9394 case EQ_EXPR:
9395 return constant_boolean_node (bitpos0 == bitpos1, type);
9396 case NE_EXPR:
9397 return constant_boolean_node (bitpos0 != bitpos1, type);
9398 case LT_EXPR:
9399 return constant_boolean_node (bitpos0 < bitpos1, type);
9400 case LE_EXPR:
9401 return constant_boolean_node (bitpos0 <= bitpos1, type);
9402 case GE_EXPR:
9403 return constant_boolean_node (bitpos0 >= bitpos1, type);
9404 case GT_EXPR:
9405 return constant_boolean_node (bitpos0 > bitpos1, type);
9406 default:;
9409 /* We can simplify the comparison to a comparison of the variable
9410 offset parts if the constant offset parts are equal.
9411 Be careful to use signed size type here because otherwise we
9412 mess with array offsets in the wrong way. This is possible
9413 because pointer arithmetic is restricted to remain within an
9414 object and overflow on pointer differences is undefined as of
9415 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9416 else if (bitpos0 == bitpos1
9417 && ((code == EQ_EXPR || code == NE_EXPR)
9418 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9420 tree signed_size_type_node;
9421 signed_size_type_node = signed_type_for (size_type_node);
9423 /* By converting to signed size type we cover middle-end pointer
9424 arithmetic which operates on unsigned pointer types of size
9425 type size and ARRAY_REF offsets which are properly sign or
9426 zero extended from their type in case it is narrower than
9427 size type. */
9428 if (offset0 == NULL_TREE)
9429 offset0 = build_int_cst (signed_size_type_node, 0);
9430 else
9431 offset0 = fold_convert_loc (loc, signed_size_type_node,
9432 offset0);
9433 if (offset1 == NULL_TREE)
9434 offset1 = build_int_cst (signed_size_type_node, 0);
9435 else
9436 offset1 = fold_convert_loc (loc, signed_size_type_node,
9437 offset1);
9439 if (code != EQ_EXPR
9440 && code != NE_EXPR
9441 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9442 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9443 fold_overflow_warning (("assuming pointer wraparound does not "
9444 "occur when comparing P +- C1 with "
9445 "P +- C2"),
9446 WARN_STRICT_OVERFLOW_COMPARISON);
9448 return fold_build2_loc (loc, code, type, offset0, offset1);
9451 /* For non-equal bases we can simplify if they are addresses
9452 of local binding decls or constants. */
9453 else if (indirect_base0 && indirect_base1
9454 /* We know that !operand_equal_p (base0, base1, 0)
9455 because the if condition was false. But make
9456 sure two decls are not the same. */
9457 && base0 != base1
9458 && TREE_CODE (arg0) == ADDR_EXPR
9459 && TREE_CODE (arg1) == ADDR_EXPR
9460 && (((TREE_CODE (base0) == VAR_DECL
9461 || TREE_CODE (base0) == PARM_DECL)
9462 && (targetm.binds_local_p (base0)
9463 || CONSTANT_CLASS_P (base1)))
9464 || CONSTANT_CLASS_P (base0))
9465 && (((TREE_CODE (base1) == VAR_DECL
9466 || TREE_CODE (base1) == PARM_DECL)
9467 && (targetm.binds_local_p (base1)
9468 || CONSTANT_CLASS_P (base0)))
9469 || CONSTANT_CLASS_P (base1)))
9471 if (code == EQ_EXPR)
9472 return omit_two_operands_loc (loc, type, boolean_false_node,
9473 arg0, arg1);
9474 else if (code == NE_EXPR)
9475 return omit_two_operands_loc (loc, type, boolean_true_node,
9476 arg0, arg1);
9478 /* For equal offsets we can simplify to a comparison of the
9479 base addresses. */
9480 else if (bitpos0 == bitpos1
9481 && (indirect_base0
9482 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9483 && (indirect_base1
9484 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9485 && ((offset0 == offset1)
9486 || (offset0 && offset1
9487 && operand_equal_p (offset0, offset1, 0))))
9489 if (indirect_base0)
9490 base0 = build_fold_addr_expr_loc (loc, base0);
9491 if (indirect_base1)
9492 base1 = build_fold_addr_expr_loc (loc, base1);
9493 return fold_build2_loc (loc, code, type, base0, base1);
9497 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9498 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9499 the resulting offset is smaller in absolute value than the
9500 original one. */
9501 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9502 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9503 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9504 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9505 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9506 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9507 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9509 tree const1 = TREE_OPERAND (arg0, 1);
9510 tree const2 = TREE_OPERAND (arg1, 1);
9511 tree variable1 = TREE_OPERAND (arg0, 0);
9512 tree variable2 = TREE_OPERAND (arg1, 0);
9513 tree cst;
9514 const char * const warnmsg = G_("assuming signed overflow does not "
9515 "occur when combining constants around "
9516 "a comparison");
9518 /* Put the constant on the side where it doesn't overflow and is
9519 of lower absolute value than before. */
9520 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9521 ? MINUS_EXPR : PLUS_EXPR,
9522 const2, const1, 0);
9523 if (!TREE_OVERFLOW (cst)
9524 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9526 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9527 return fold_build2_loc (loc, code, type,
9528 variable1,
9529 fold_build2_loc (loc,
9530 TREE_CODE (arg1), TREE_TYPE (arg1),
9531 variable2, cst));
9534 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9535 ? MINUS_EXPR : PLUS_EXPR,
9536 const1, const2, 0);
9537 if (!TREE_OVERFLOW (cst)
9538 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9540 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9541 return fold_build2_loc (loc, code, type,
9542 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9543 variable1, cst),
9544 variable2);
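
Concretely, for constants that combine without overflow (an illustrative sketch; the folder emits the strict-overflow warning shown above when it does this):

#include <assert.h>

int
main (void)
{
  for (int x = -50; x <= 50; x++)
    for (int y = -50; y <= 50; y++)
      /* X + 10 < Y + 3 -> X < Y + (3 - 10) -> X < Y - 7.  */
      assert ((x + 10 < y + 3) == (x < y - 7));
  return 0;
}
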
9548 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9549 signed arithmetic case. That form is created by the compiler
9550 often enough for folding it to be of value. One example is in
9551 computing loop trip counts after Operator Strength Reduction. */
9552 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9553 && TREE_CODE (arg0) == MULT_EXPR
9554 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9555 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9556 && integer_zerop (arg1))
9558 tree const1 = TREE_OPERAND (arg0, 1);
9559 tree const2 = arg1; /* zero */
9560 tree variable1 = TREE_OPERAND (arg0, 0);
9561 enum tree_code cmp_code = code;
9563 gcc_assert (!integer_zerop (const1));
9565 fold_overflow_warning (("assuming signed overflow does not occur when "
9566 "eliminating multiplication in comparison "
9567 "with zero"),
9568 WARN_STRICT_OVERFLOW_COMPARISON);
9570 /* If const1 is negative we swap the sense of the comparison. */
9571 if (tree_int_cst_sgn (const1) < 0)
9572 cmp_code = swap_tree_comparison (cmp_code);
9574 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
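
A minimal check of the multiplication elimination (values kept small so the products cannot overflow):

#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    {
      /* X * 4 > 0 -> X > 0.  */
      assert ((x * 4 > 0) == (x > 0));
      /* A negative multiplier swaps the sense: X * -4 > 0 -> X < 0.  */
      assert ((x * -4 > 0) == (x < 0));
    }
  return 0;
}
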
9577 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
9578 if (tem)
9579 return tem;
9581 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9583 tree targ0 = strip_float_extensions (arg0);
9584 tree targ1 = strip_float_extensions (arg1);
9585 tree newtype = TREE_TYPE (targ0);
9587 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9588 newtype = TREE_TYPE (targ1);
9590 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9591 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9592 return fold_build2_loc (loc, code, type,
9593 fold_convert_loc (loc, newtype, targ0),
9594 fold_convert_loc (loc, newtype, targ1));
9596 /* (-a) CMP (-b) -> b CMP a */
9597 if (TREE_CODE (arg0) == NEGATE_EXPR
9598 && TREE_CODE (arg1) == NEGATE_EXPR)
9599 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9600 TREE_OPERAND (arg0, 0));
9602 if (TREE_CODE (arg1) == REAL_CST)
9604 REAL_VALUE_TYPE cst;
9605 cst = TREE_REAL_CST (arg1);
9607 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9608 if (TREE_CODE (arg0) == NEGATE_EXPR)
9609 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9610 TREE_OPERAND (arg0, 0),
9611 build_real (TREE_TYPE (arg1),
9612 REAL_VALUE_NEGATE (cst)));
9614 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9615 /* a CMP (-0) -> a CMP 0 */
9616 if (REAL_VALUE_MINUS_ZERO (cst))
9617 return fold_build2_loc (loc, code, type, arg0,
9618 build_real (TREE_TYPE (arg1), dconst0));
9620 /* x != NaN is always true, other ops are always false. */
9621 if (REAL_VALUE_ISNAN (cst)
9622 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9624 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9625 return omit_one_operand_loc (loc, type, tem, arg0);
9628 /* Fold comparisons against infinity. */
9629 if (REAL_VALUE_ISINF (cst)
9630 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9632 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9633 if (tem != NULL_TREE)
9634 return tem;
9638 /* If this is a comparison of a real constant with a PLUS_EXPR
9639 or a MINUS_EXPR of a real constant, we can convert it into a
9640 comparison with a revised real constant as long as no overflow
9641 occurs when unsafe_math_optimizations are enabled. */
9642 if (flag_unsafe_math_optimizations
9643 && TREE_CODE (arg1) == REAL_CST
9644 && (TREE_CODE (arg0) == PLUS_EXPR
9645 || TREE_CODE (arg0) == MINUS_EXPR)
9646 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9647 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9648 ? MINUS_EXPR : PLUS_EXPR,
9649 arg1, TREE_OPERAND (arg0, 1), 0))
9650 && !TREE_OVERFLOW (tem))
9651 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9653 /* Likewise, we can simplify a comparison of a real constant with
9654 a MINUS_EXPR whose first operand is also a real constant, i.e.
9655 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9656 floating-point types only if -fassociative-math is set. */
9657 if (flag_associative_math
9658 && TREE_CODE (arg1) == REAL_CST
9659 && TREE_CODE (arg0) == MINUS_EXPR
9660 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9661 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9662 arg1, 0))
9663 && !TREE_OVERFLOW (tem))
9664 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9665 TREE_OPERAND (arg0, 1), tem);
9667 /* Fold comparisons against built-in math functions. */
9668 if (TREE_CODE (arg1) == REAL_CST
9669 && flag_unsafe_math_optimizations
9670 && ! flag_errno_math)
9672 enum built_in_function fcode = builtin_mathfn_code (arg0);
9674 if (fcode != END_BUILTINS)
9676 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9677 if (tem != NULL_TREE)
9678 return tem;
9683 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9684 && CONVERT_EXPR_P (arg0))
9686 /* If we are widening one operand of an integer comparison,
9687 see if the other operand is similarly being widened. Perhaps we
9688 can do the comparison in the narrower type. */
9689 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9690 if (tem)
9691 return tem;
9693 /* Or if we are changing signedness. */
9694 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9695 if (tem)
9696 return tem;
9699 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9700 constant, we can simplify it. */
9701 if (TREE_CODE (arg1) == INTEGER_CST
9702 && (TREE_CODE (arg0) == MIN_EXPR
9703 || TREE_CODE (arg0) == MAX_EXPR)
9704 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9706 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9707 if (tem)
9708 return tem;
9711 /* Simplify comparison of something with itself. (For IEEE
9712 floating-point, we can only do some of these simplifications.) */
9713 if (operand_equal_p (arg0, arg1, 0))
9715 switch (code)
9717 case EQ_EXPR:
9718 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9719 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9720 return constant_boolean_node (1, type);
9721 break;
9723 case GE_EXPR:
9724 case LE_EXPR:
9725 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9726 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9727 return constant_boolean_node (1, type);
9728 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9730 case NE_EXPR:
9731 /* For NE, we can only do this simplification if the type is
9732 integer or we don't honor IEEE floating-point NaNs. */
9733 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9734 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9735 break;
9736 /* ... fall through ... */
9737 case GT_EXPR:
9738 case LT_EXPR:
9739 return constant_boolean_node (0, type);
9740 default:
9741 gcc_unreachable ();
9745 /* If we are comparing an expression that just has comparisons
9746 of two integer values, arithmetic expressions of those comparisons,
9747 and constants, we can simplify it. There are only three cases
9748 to check: the two values can either be equal, the first can be
9749 greater, or the second can be greater. Fold the expression for
9750 those three values. Since each value must be 0 or 1, we have
9751 eight possibilities, each of which corresponds to the constant 0
9752 or 1 or one of the six possible comparisons.
9754 This handles common cases like (a > b) == 0 but also handles
9755 expressions like ((x > y) - (y > x)) > 0, which supposedly
9756 occur in macroized code. */
9758 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9760 tree cval1 = 0, cval2 = 0;
9761 int save_p = 0;
9763 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9764 /* Don't handle degenerate cases here; they should already
9765 have been handled anyway. */
9766 && cval1 != 0 && cval2 != 0
9767 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9768 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9769 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9770 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9771 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9772 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9773 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9775 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9776 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9778 /* We can't just pass T to eval_subst in case cval1 or cval2
9779 was the same as ARG1. */
9781 tree high_result
9782 = fold_build2_loc (loc, code, type,
9783 eval_subst (loc, arg0, cval1, maxval,
9784 cval2, minval),
9785 arg1);
9786 tree equal_result
9787 = fold_build2_loc (loc, code, type,
9788 eval_subst (loc, arg0, cval1, maxval,
9789 cval2, maxval),
9790 arg1);
9791 tree low_result
9792 = fold_build2_loc (loc, code, type,
9793 eval_subst (loc, arg0, cval1, minval,
9794 cval2, maxval),
9795 arg1);
9797 /* All three of these results should be 0 or 1. Confirm they are.
9798 Then use those values to select the proper code to use. */
9800 if (TREE_CODE (high_result) == INTEGER_CST
9801 && TREE_CODE (equal_result) == INTEGER_CST
9802 && TREE_CODE (low_result) == INTEGER_CST)
9804 /* Make a 3-bit mask with the high-order bit being the
9805 value for `>', the next for '=', and the low for '<'. */
9806 switch ((integer_onep (high_result) * 4)
9807 + (integer_onep (equal_result) * 2)
9808 + integer_onep (low_result))
9810 case 0:
9811 /* Always false. */
9812 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9813 case 1:
9814 code = LT_EXPR;
9815 break;
9816 case 2:
9817 code = EQ_EXPR;
9818 break;
9819 case 3:
9820 code = LE_EXPR;
9821 break;
9822 case 4:
9823 code = GT_EXPR;
9824 break;
9825 case 5:
9826 code = NE_EXPR;
9827 break;
9828 case 6:
9829 code = GE_EXPR;
9830 break;
9831 case 7:
9832 /* Always true. */
9833 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9836 if (save_p)
9838 tem = save_expr (build2 (code, type, cval1, cval2));
9839 SET_EXPR_LOCATION (tem, loc);
9840 return tem;
9842 return fold_build2_loc (loc, code, type, cval1, cval2);
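
Two of the cases the three-value evaluation above decides, written out as a plain C check (not part of this file):

#include <assert.h>

int
main (void)
{
  for (int a = -3; a <= 3; a++)
    for (int b = -3; b <= 3; b++)
      {
	/* high/equal/low = 0/1/1, mask 3 -> LE_EXPR:
	   (a > b) == 0 folds to a <= b.  */
	assert (((a > b) == 0) == (a <= b));
	/* high/equal/low = 1/0/0, mask 4 -> GT_EXPR:
	   ((a > b) - (b > a)) > 0 folds to a > b.  */
	assert ((((a > b) - (b > a)) > 0) == (a > b));
      }
  return 0;
}
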
9847 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9848 into a single range test. */
9849 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9850 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9851 && TREE_CODE (arg1) == INTEGER_CST
9852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9853 && !integer_zerop (TREE_OPERAND (arg0, 1))
9854 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9855 && !TREE_OVERFLOW (arg1))
9857 tem = fold_div_compare (loc, code, type, arg0, arg1);
9858 if (tem != NULL_TREE)
9859 return tem;
9862 /* Fold ~X op ~Y as Y op X. */
9863 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9864 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9866 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9867 return fold_build2_loc (loc, code, type,
9868 fold_convert_loc (loc, cmp_type,
9869 TREE_OPERAND (arg1, 0)),
9870 TREE_OPERAND (arg0, 0));
9873 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9874 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9875 && TREE_CODE (arg1) == INTEGER_CST)
9877 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9878 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9879 TREE_OPERAND (arg0, 0),
9880 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9881 fold_convert_loc (loc, cmp_type, arg1)));
9884 return NULL_TREE;
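
The two BIT_NOT_EXPR rewrites that close out fold_comparison above have simple integer models (a sketch; in two's complement ~x is -x-1):

#include <assert.h>

int
main (void)
{
  for (int x = -20; x <= 20; x++)
    {
      for (int y = -20; y <= 20; y++)
	/* ~X op ~Y -> Y op X: complement reverses the ordering.  */
	assert ((~x < ~y) == (y < x));
      /* ~X op C -> X op' ~C with the comparison swapped.  */
      assert ((~x < 5) == (x > ~5));
    }
  return 0;
}
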
9888 /* Subroutine of fold_binary. Optimize complex multiplications of the
9889 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9890 argument EXPR represents the expression "z" of type TYPE. */
9892 static tree
9893 fold_mult_zconjz (location_t loc, tree type, tree expr)
9895 tree itype = TREE_TYPE (type);
9896 tree rpart, ipart, tem;
9898 if (TREE_CODE (expr) == COMPLEX_EXPR)
9900 rpart = TREE_OPERAND (expr, 0);
9901 ipart = TREE_OPERAND (expr, 1);
9903 else if (TREE_CODE (expr) == COMPLEX_CST)
9905 rpart = TREE_REALPART (expr);
9906 ipart = TREE_IMAGPART (expr);
9908 else
9910 expr = save_expr (expr);
9911 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9912 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9915 rpart = save_expr (rpart);
9916 ipart = save_expr (ipart);
9917 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9918 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9919 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9920 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9921 fold_convert_loc (loc, itype, integer_zero_node));
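
A quick check of the identity fold_mult_zconjz implements, using C99 complex arithmetic (exact here because the parts are small integers):

#include <assert.h>
#include <complex.h>

int
main (void)
{
  double complex z = 3.0 + 4.0 * I;
  double complex prod = z * conj (z);
  /* z * conj(z) = rpart*rpart + ipart*ipart, with zero imaginary part.  */
  assert (creal (prod) == 3.0 * 3.0 + 4.0 * 4.0);
  assert (cimag (prod) == 0.0);
  return 0;
}
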
9925 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9926 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9927 guarantees that P and N have the same least significant log2(M) bits.
9928 N is not otherwise constrained. In particular, N is not normalized to
9929 0 <= N < M as is common. In general, the precise value of P is unknown.
9930 M is chosen as large as possible such that constant N can be determined.
9932 Returns M and sets *RESIDUE to N.
9934 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9935 account. This is not always possible due to PR 35705.
9938 static unsigned HOST_WIDE_INT
9939 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9940 bool allow_func_align)
9942 enum tree_code code;
9944 *residue = 0;
9946 code = TREE_CODE (expr);
9947 if (code == ADDR_EXPR)
9949 expr = TREE_OPERAND (expr, 0);
9950 if (handled_component_p (expr))
9952 HOST_WIDE_INT bitsize, bitpos;
9953 tree offset;
9954 enum machine_mode mode;
9955 int unsignedp, volatilep;
9957 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9958 &mode, &unsignedp, &volatilep, false);
9959 *residue = bitpos / BITS_PER_UNIT;
9960 if (offset)
9962 if (TREE_CODE (offset) == INTEGER_CST)
9963 *residue += TREE_INT_CST_LOW (offset);
9964 else
9965 /* We don't handle more complicated offset expressions. */
9966 return 1;
9970 if (DECL_P (expr)
9971 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9972 return DECL_ALIGN_UNIT (expr);
9974 else if (code == POINTER_PLUS_EXPR)
9976 tree op0, op1;
9977 unsigned HOST_WIDE_INT modulus;
9978 enum tree_code inner_code;
9980 op0 = TREE_OPERAND (expr, 0);
9981 STRIP_NOPS (op0);
9982 modulus = get_pointer_modulus_and_residue (op0, residue,
9983 allow_func_align);
9985 op1 = TREE_OPERAND (expr, 1);
9986 STRIP_NOPS (op1);
9987 inner_code = TREE_CODE (op1);
9988 if (inner_code == INTEGER_CST)
9990 *residue += TREE_INT_CST_LOW (op1);
9991 return modulus;
9993 else if (inner_code == MULT_EXPR)
9995 op1 = TREE_OPERAND (op1, 1);
9996 if (TREE_CODE (op1) == INTEGER_CST)
9998 unsigned HOST_WIDE_INT align;
10000 /* Compute the greatest power-of-2 divisor of op1. */
10001 align = TREE_INT_CST_LOW (op1);
10002 align &= -align;
10004 /* If align is non-zero and less than modulus, replace
10005 modulus with align. If align is 0, then either op1 is 0
10006 or the greatest power-of-2 divisor of op1 doesn't fit in an
10007 unsigned HOST_WIDE_INT. In either case, no additional
10008 constraint is imposed. */
10009 if (align)
10010 modulus = MIN (modulus, align);
10012 return modulus;
10017 /* If we get here, we were unable to determine anything useful about the
10018 expression. */
10019 return 1;
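
For a concrete feel of modulus and residue (C11 _Alignas standing in for the decl's DECL_ALIGN_UNIT; the values are hypothetical):

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  _Alignas (16) static char buf[32];
  /* For EXPR = &buf[3]: modulus M = 16 (the alignment of buf) and
     residue N = 3 (the constant offset), so P == N (mod M).  */
  uintptr_t p = (uintptr_t) &buf[3];
  assert (p % 16 == 3);
  return 0;
}
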
10023 /* Fold a binary expression of code CODE and type TYPE with operands
10024 OP0 and OP1. LOC is the location of the resulting expression.
10025 Return the folded expression if folding is successful. Otherwise,
10026 return NULL_TREE. */
10028 tree
10029 fold_binary_loc (location_t loc,
10030 enum tree_code code, tree type, tree op0, tree op1)
10032 enum tree_code_class kind = TREE_CODE_CLASS (code);
10033 tree arg0, arg1, tem;
10034 tree t1 = NULL_TREE;
10035 bool strict_overflow_p;
10037 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10038 && TREE_CODE_LENGTH (code) == 2
10039 && op0 != NULL_TREE
10040 && op1 != NULL_TREE);
10042 arg0 = op0;
10043 arg1 = op1;
10045 /* Strip any conversions that don't change the mode. This is
10046 safe for every expression, except for a comparison expression
10047 because its signedness is derived from its operands. So, in
10048 the latter case, only strip conversions that don't change the
10049 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10050 preserved.
10052 Note that this is done as an internal manipulation within the
10053 constant folder, in order to find the simplest representation
10054 of the arguments so that their form can be studied. In any
10055 case, the appropriate type conversions should be put back in
10056 the tree that will get out of the constant folder. */
10058 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10060 STRIP_SIGN_NOPS (arg0);
10061 STRIP_SIGN_NOPS (arg1);
10063 else
10065 STRIP_NOPS (arg0);
10066 STRIP_NOPS (arg1);
10069 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10070 constant but we can't do arithmetic on them. */
10071 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10072 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10073 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10074 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10075 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10076 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
10078 if (kind == tcc_binary)
10080 /* Make sure type and arg0 have the same saturating flag. */
10081 gcc_assert (TYPE_SATURATING (type)
10082 == TYPE_SATURATING (TREE_TYPE (arg0)));
10083 tem = const_binop (code, arg0, arg1, 0);
10085 else if (kind == tcc_comparison)
10086 tem = fold_relational_const (code, type, arg0, arg1);
10087 else
10088 tem = NULL_TREE;
10090 if (tem != NULL_TREE)
10092 if (TREE_TYPE (tem) != type)
10093 tem = fold_convert_loc (loc, type, tem);
10094 return tem;
10098 /* If this is a commutative operation, and ARG0 is a constant, move it
10099 to ARG1 to reduce the number of tests below. */
10100 if (commutative_tree_code (code)
10101 && tree_swap_operands_p (arg0, arg1, true))
10102 return fold_build2_loc (loc, code, type, op1, op0);
10104 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10106 First check for cases where an arithmetic operation is applied to a
10107 compound, conditional, or comparison operation. Push the arithmetic
10108 operation inside the compound or conditional to see if any folding
10109 can then be done. Convert comparison to conditional for this purpose.
10110 This also optimizes non-constant cases that used to be done in
10111 expand_expr.
10113 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10114 where one operand is a comparison and the other is a comparison, a
10115 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10116 code below would make the expression more complex. Change it to a
10117 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10118 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10120 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10121 || code == EQ_EXPR || code == NE_EXPR)
10122 && ((truth_value_p (TREE_CODE (arg0))
10123 && (truth_value_p (TREE_CODE (arg1))
10124 || (TREE_CODE (arg1) == BIT_AND_EXPR
10125 && integer_onep (TREE_OPERAND (arg1, 1)))))
10126 || (truth_value_p (TREE_CODE (arg1))
10127 && (truth_value_p (TREE_CODE (arg0))
10128 || (TREE_CODE (arg0) == BIT_AND_EXPR
10129 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10131 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10132 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10133 : TRUTH_XOR_EXPR,
10134 boolean_type_node,
10135 fold_convert_loc (loc, boolean_type_node, arg0),
10136 fold_convert_loc (loc, boolean_type_node, arg1));
10138 if (code == EQ_EXPR)
10139 tem = invert_truthvalue_loc (loc, tem);
10141 return fold_convert_loc (loc, type, tem);
10144 if (TREE_CODE_CLASS (code) == tcc_binary
10145 || TREE_CODE_CLASS (code) == tcc_comparison)
10147 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10149 tem = fold_build2_loc (loc, code, type,
10150 fold_convert_loc (loc, TREE_TYPE (op0),
10151 TREE_OPERAND (arg0, 1)), op1);
10152 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10153 goto fold_binary_exit;
10155 if (TREE_CODE (arg1) == COMPOUND_EXPR
10156 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10158 tem = fold_build2_loc (loc, code, type, op0,
10159 fold_convert_loc (loc, TREE_TYPE (op1),
10160 TREE_OPERAND (arg1, 1)));
10161 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10162 goto fold_binary_exit;
10165 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10167 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10168 arg0, arg1,
10169 /*cond_first_p=*/1);
10170 if (tem != NULL_TREE)
10171 return tem;
10174 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10176 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10177 arg1, arg0,
10178 /*cond_first_p=*/0);
10179 if (tem != NULL_TREE)
10180 return tem;
10184 switch (code)
10186 case POINTER_PLUS_EXPR:
10187 /* 0 +p index -> (type)index */
10188 if (integer_zerop (arg0))
10189 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10191 /* PTR +p 0 -> PTR */
10192 if (integer_zerop (arg1))
10193 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10195 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10196 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10197 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10198 return fold_convert_loc (loc, type,
10199 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10200 fold_convert_loc (loc, sizetype,
10201 arg1),
10202 fold_convert_loc (loc, sizetype,
10203 arg0)));
10205 /* index +p PTR -> PTR +p index */
10206 if (POINTER_TYPE_P (TREE_TYPE (arg1))
10207 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10208 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10209 fold_convert_loc (loc, type, arg1),
10210 fold_convert_loc (loc, sizetype, arg0));
10212 /* (PTR +p B) +p A -> PTR +p (B + A) */
10213 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10215 tree inner;
10216 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10217 tree arg00 = TREE_OPERAND (arg0, 0);
10218 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10219 arg01, fold_convert_loc (loc, sizetype, arg1));
10220 return fold_convert_loc (loc, type,
10221 fold_build2_loc (loc, POINTER_PLUS_EXPR,
10222 TREE_TYPE (arg00),
10223 arg00, inner));
10226 /* PTR_CST +p CST -> CST1 */
10227 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10228 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10229 fold_convert_loc (loc, type, arg1));
10231 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10232 of the array. The loop optimizer sometimes produces this type of
10233 expression. */
10234 if (TREE_CODE (arg0) == ADDR_EXPR)
10236 tem = try_move_mult_to_index (loc, arg0,
10237 fold_convert_loc (loc, sizetype, arg1));
10238 if (tem)
10239 return fold_convert_loc (loc, type, tem);
10242 return NULL_TREE;
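
At the source level the POINTER_PLUS_EXPR rules above cover identities like these (a trivial sketch):

#include <assert.h>

int
main (void)
{
  int a[8];
  int *p = a;
  /* PTR +p 0 -> PTR.  */
  assert (p + 0 == p);
  /* (PTR +p B) +p A -> PTR +p (B + A): nested offsets combine.  */
  assert ((p + 2) + 3 == p + 5);
  return 0;
}
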
10244 case PLUS_EXPR:
10245 /* A + (-B) -> A - B */
10246 if (TREE_CODE (arg1) == NEGATE_EXPR)
10247 return fold_build2_loc (loc, MINUS_EXPR, type,
10248 fold_convert_loc (loc, type, arg0),
10249 fold_convert_loc (loc, type,
10250 TREE_OPERAND (arg1, 0)));
10251 /* (-A) + B -> B - A */
10252 if (TREE_CODE (arg0) == NEGATE_EXPR
10253 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10254 return fold_build2_loc (loc, MINUS_EXPR, type,
10255 fold_convert_loc (loc, type, arg1),
10256 fold_convert_loc (loc, type,
10257 TREE_OPERAND (arg0, 0)));
10259 if (INTEGRAL_TYPE_P (type))
10261 /* Convert ~A + 1 to -A. */
10262 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10263 && integer_onep (arg1))
10264 return fold_build1_loc (loc, NEGATE_EXPR, type,
10265 fold_convert_loc (loc, type,
10266 TREE_OPERAND (arg0, 0)));
10268 /* ~X + X is -1. */
10269 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10270 && !TYPE_OVERFLOW_TRAPS (type))
10272 tree tem = TREE_OPERAND (arg0, 0);
10274 STRIP_NOPS (tem);
10275 if (operand_equal_p (tem, arg1, 0))
10277 t1 = build_int_cst_type (type, -1);
10278 return omit_one_operand_loc (loc, type, t1, arg1);
10282 /* X + ~X is -1. */
10283 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10284 && !TYPE_OVERFLOW_TRAPS (type))
10286 tree tem = TREE_OPERAND (arg1, 0);
10288 STRIP_NOPS (tem);
10289 if (operand_equal_p (arg0, tem, 0))
10291 t1 = build_int_cst_type (type, -1);
10292 return omit_one_operand_loc (loc, type, t1, arg0);
10296 /* X + (X / CST) * -CST is X % CST. */
10297 if (TREE_CODE (arg1) == MULT_EXPR
10298 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10299 && operand_equal_p (arg0,
10300 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10302 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10303 tree cst1 = TREE_OPERAND (arg1, 1);
10304 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10305 cst1, cst0);
10306 if (sum && integer_zerop (sum))
10307 return fold_convert_loc (loc, type,
10308 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10309 TREE_TYPE (arg0), arg0,
10310 cst0));
10314 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10315 same or one. Make sure type is not saturating.
10316 fold_plusminus_mult_expr will re-associate. */
10317 if ((TREE_CODE (arg0) == MULT_EXPR
10318 || TREE_CODE (arg1) == MULT_EXPR)
10319 && !TYPE_SATURATING (type)
10320 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10322 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10323 if (tem)
10324 return tem;
10327 if (! FLOAT_TYPE_P (type))
10329 if (integer_zerop (arg1))
10330 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10332 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10333 with a constant, and the two constants have no bits in common,
10334 we should treat this as a BIT_IOR_EXPR since this may produce more
10335 simplifications. */
10336 if (TREE_CODE (arg0) == BIT_AND_EXPR
10337 && TREE_CODE (arg1) == BIT_AND_EXPR
10338 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10339 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10340 && integer_zerop (const_binop (BIT_AND_EXPR,
10341 TREE_OPERAND (arg0, 1),
10342 TREE_OPERAND (arg1, 1), 0)))
10344 code = BIT_IOR_EXPR;
10345 goto bit_ior;
10348 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10349 (plus (plus (mult) (mult)) (foo)) so that we can
10350 take advantage of the factoring cases below. */
10351 if (((TREE_CODE (arg0) == PLUS_EXPR
10352 || TREE_CODE (arg0) == MINUS_EXPR)
10353 && TREE_CODE (arg1) == MULT_EXPR)
10354 || ((TREE_CODE (arg1) == PLUS_EXPR
10355 || TREE_CODE (arg1) == MINUS_EXPR)
10356 && TREE_CODE (arg0) == MULT_EXPR))
10358 tree parg0, parg1, parg, marg;
10359 enum tree_code pcode;
10361 if (TREE_CODE (arg1) == MULT_EXPR)
10362 parg = arg0, marg = arg1;
10363 else
10364 parg = arg1, marg = arg0;
10365 pcode = TREE_CODE (parg);
10366 parg0 = TREE_OPERAND (parg, 0);
10367 parg1 = TREE_OPERAND (parg, 1);
10368 STRIP_NOPS (parg0);
10369 STRIP_NOPS (parg1);
10371 if (TREE_CODE (parg0) == MULT_EXPR
10372 && TREE_CODE (parg1) != MULT_EXPR)
10373 return fold_build2_loc (loc, pcode, type,
10374 fold_build2_loc (loc, PLUS_EXPR, type,
10375 fold_convert_loc (loc, type,
10376 parg0),
10377 fold_convert_loc (loc, type,
10378 marg)),
10379 fold_convert_loc (loc, type, parg1));
10380 if (TREE_CODE (parg0) != MULT_EXPR
10381 && TREE_CODE (parg1) == MULT_EXPR)
10382 return
10383 fold_build2_loc (loc, PLUS_EXPR, type,
10384 fold_convert_loc (loc, type, parg0),
10385 fold_build2_loc (loc, pcode, type,
10386 fold_convert_loc (loc, type, marg),
10387 fold_convert_loc (loc, type,
10388 parg1)));
10391 else
10393 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10394 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10395 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10397 /* Likewise if the operands are reversed. */
10398 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10399 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10401 /* Convert X + -C into X - C. */
10402 if (TREE_CODE (arg1) == REAL_CST
10403 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10405 tem = fold_negate_const (arg1, type);
10406 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10407 return fold_build2_loc (loc, MINUS_EXPR, type,
10408 fold_convert_loc (loc, type, arg0),
10409 fold_convert_loc (loc, type, tem));
10412 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10413 to __complex__ ( x, y ). This is not the same for SNaNs or
10414 if signed zeros are involved. */
10415 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10416 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10417 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10419 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10420 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10421 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10422 bool arg0rz = false, arg0iz = false;
10423 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10424 || (arg0i && (arg0iz = real_zerop (arg0i))))
10426 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10427 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10428 if (arg0rz && arg1i && real_zerop (arg1i))
10430 tree rp = arg1r ? arg1r
10431 : build1 (REALPART_EXPR, rtype, arg1);
10432 tree ip = arg0i ? arg0i
10433 : build1 (IMAGPART_EXPR, rtype, arg0);
10434 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10436 else if (arg0iz && arg1r && real_zerop (arg1r))
10438 tree rp = arg0r ? arg0r
10439 : build1 (REALPART_EXPR, rtype, arg0);
10440 tree ip = arg1i ? arg1i
10441 : build1 (IMAGPART_EXPR, rtype, arg1);
10442 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10447 if (flag_unsafe_math_optimizations
10448 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10449 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10450 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10451 return tem;
10453 /* Convert x+x into x*2.0. */
10454 if (operand_equal_p (arg0, arg1, 0)
10455 && SCALAR_FLOAT_TYPE_P (type))
10456 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10457 build_real (type, dconst2));
10459 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10460 We associate floats only if the user has specified
10461 -fassociative-math. */
10462 if (flag_associative_math
10463 && TREE_CODE (arg1) == PLUS_EXPR
10464 && TREE_CODE (arg0) != MULT_EXPR)
10466 tree tree10 = TREE_OPERAND (arg1, 0);
10467 tree tree11 = TREE_OPERAND (arg1, 1);
10468 if (TREE_CODE (tree11) == MULT_EXPR
10469 && TREE_CODE (tree10) == MULT_EXPR)
10471 tree tree0;
10472 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10473 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10476 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10477 We associate floats only if the user has specified
10478 -fassociative-math. */
10479 if (flag_associative_math
10480 && TREE_CODE (arg0) == PLUS_EXPR
10481 && TREE_CODE (arg1) != MULT_EXPR)
10483 tree tree00 = TREE_OPERAND (arg0, 0);
10484 tree tree01 = TREE_OPERAND (arg0, 1);
10485 if (TREE_CODE (tree01) == MULT_EXPR
10486 && TREE_CODE (tree00) == MULT_EXPR)
10488 tree tree0;
10489 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10490 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10495 bit_rotate:
10496 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10497 is a rotate of A by C1 bits. */
10498 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10499 is a rotate of A by B bits. */
10501 enum tree_code code0, code1;
10502 tree rtype;
10503 code0 = TREE_CODE (arg0);
10504 code1 = TREE_CODE (arg1);
10505 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10506 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10507 && operand_equal_p (TREE_OPERAND (arg0, 0),
10508 TREE_OPERAND (arg1, 0), 0)
10509 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10510 TYPE_UNSIGNED (rtype))
10511 /* Only create rotates in complete modes. Other cases are not
10512 expanded properly. */
10513 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10515 tree tree01, tree11;
10516 enum tree_code code01, code11;
10518 tree01 = TREE_OPERAND (arg0, 1);
10519 tree11 = TREE_OPERAND (arg1, 1);
10520 STRIP_NOPS (tree01);
10521 STRIP_NOPS (tree11);
10522 code01 = TREE_CODE (tree01);
10523 code11 = TREE_CODE (tree11);
10524 if (code01 == INTEGER_CST
10525 && code11 == INTEGER_CST
10526 && TREE_INT_CST_HIGH (tree01) == 0
10527 && TREE_INT_CST_HIGH (tree11) == 0
10528 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10529 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10531 tem = build2 (LROTATE_EXPR,
10532 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10533 TREE_OPERAND (arg0, 0),
10534 code0 == LSHIFT_EXPR
10535 ? tree01 : tree11);
10536 SET_EXPR_LOCATION (tem, loc);
10537 return fold_convert_loc (loc, type, tem);
10539 else if (code11 == MINUS_EXPR)
10541 tree tree110, tree111;
10542 tree110 = TREE_OPERAND (tree11, 0);
10543 tree111 = TREE_OPERAND (tree11, 1);
10544 STRIP_NOPS (tree110);
10545 STRIP_NOPS (tree111);
10546 if (TREE_CODE (tree110) == INTEGER_CST
10547 && 0 == compare_tree_int (tree110,
10548 TYPE_PRECISION
10549 (TREE_TYPE (TREE_OPERAND
10550 (arg0, 0))))
10551 && operand_equal_p (tree01, tree111, 0))
10552 return
10553 fold_convert_loc (loc, type,
10554 build2 ((code0 == LSHIFT_EXPR
10555 ? LROTATE_EXPR
10556 : RROTATE_EXPR),
10557 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10558 TREE_OPERAND (arg0, 0), tree01));
10560 else if (code01 == MINUS_EXPR)
10562 tree tree010, tree011;
10563 tree010 = TREE_OPERAND (tree01, 0);
10564 tree011 = TREE_OPERAND (tree01, 1);
10565 STRIP_NOPS (tree010);
10566 STRIP_NOPS (tree011);
10567 if (TREE_CODE (tree010) == INTEGER_CST
10568 && 0 == compare_tree_int (tree010,
10569 TYPE_PRECISION
10570 (TREE_TYPE (TREE_OPERAND
10571 (arg0, 0))))
10572 && operand_equal_p (tree11, tree011, 0))
10573 return fold_convert_loc
10574 (loc, type,
10575 build2 ((code0 != LSHIFT_EXPR
10576 ? LROTATE_EXPR
10577 : RROTATE_EXPR),
10578 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10579 TREE_OPERAND (arg0, 0), tree11));
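
The shift pair recognized here is the classic rotate idiom; a runnable sketch for 32-bit values (B must stay in 1..31 to avoid an undefined shift count):

#include <assert.h>
#include <stdint.h>

/* (A << B) + (A >> (32 - B)) with unsigned 32-bit A folds to a
   left rotate by B bits (LROTATE_EXPR).  */
static uint32_t
rotl32 (uint32_t a, unsigned b)
{
  return (a << b) + (a >> (32 - b));
}

int
main (void)
{
  assert (rotl32 (0x80000001u, 1) == 0x00000003u);
  assert (rotl32 (0x12345678u, 8) == 0x34567812u);
  return 0;
}
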
10584 associate:
10585 /* In most languages, we can't associate operations on floats through
10586 parentheses. Rather than remember where the parentheses were, we
10587 don't associate floats at all, unless the user has specified
10588 -fassociative-math.
10589 And, we need to make sure type is not saturating. */
10591 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10592 && !TYPE_SATURATING (type))
10594 tree var0, con0, lit0, minus_lit0;
10595 tree var1, con1, lit1, minus_lit1;
10596 bool ok = true;
10598 /* Split both trees into variables, constants, and literals. Then
10599 associate each group together, the constants with literals,
10600 then the result with variables. This increases the chances of
10601 literals being recombined later and of generating relocatable
10602 expressions for the sum of a constant and literal. */
10603 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10604 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10605 code == MINUS_EXPR);
10607 /* With undefined overflow we can only associate constants
10608 with one variable. */
10609 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10610 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10611 && var0 && var1)
10613 tree tmp0 = var0;
10614 tree tmp1 = var1;
10616 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10617 tmp0 = TREE_OPERAND (tmp0, 0);
10618 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10619 tmp1 = TREE_OPERAND (tmp1, 0);
10620 /* The only case we can still associate with two variables
10621 is if they are the same, modulo negation. */
10622 if (!operand_equal_p (tmp0, tmp1, 0))
10623 ok = false;
10626 /* Only do something if we found more than two objects. Otherwise,
10627 nothing has changed and we risk infinite recursion. */
10628 if (ok
10629 && (2 < ((var0 != 0) + (var1 != 0)
10630 + (con0 != 0) + (con1 != 0)
10631 + (lit0 != 0) + (lit1 != 0)
10632 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10634 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10635 if (code == MINUS_EXPR)
10636 code = PLUS_EXPR;
10638 var0 = associate_trees (loc, var0, var1, code, type);
10639 con0 = associate_trees (loc, con0, con1, code, type);
10640 lit0 = associate_trees (loc, lit0, lit1, code, type);
10641 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10643 /* Preserve the MINUS_EXPR if the negative part of the literal is
10644 greater than the positive part. Otherwise, the multiplicative
10645 folding code (i.e. extract_muldiv) may be fooled when
10646 unsigned constants are subtracted, as in the following
10647 example: ((X*2 + 4) - 8U)/2. */
10648 if (minus_lit0 && lit0)
10650 if (TREE_CODE (lit0) == INTEGER_CST
10651 && TREE_CODE (minus_lit0) == INTEGER_CST
10652 && tree_int_cst_lt (lit0, minus_lit0))
10654 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10655 MINUS_EXPR, type);
10656 lit0 = 0;
10658 else
10660 lit0 = associate_trees (loc, lit0, minus_lit0,
10661 MINUS_EXPR, type);
10662 minus_lit0 = 0;
10665 if (minus_lit0)
10667 if (con0 == 0)
10668 return
10669 fold_convert_loc (loc, type,
10670 associate_trees (loc, var0, minus_lit0,
10671 MINUS_EXPR, type));
10672 else
10674 con0 = associate_trees (loc, con0, minus_lit0,
10675 MINUS_EXPR, type);
10676 return
10677 fold_convert_loc (loc, type,
10678 associate_trees (loc, var0, con0,
10679 PLUS_EXPR, type));
10683 con0 = associate_trees (loc, con0, lit0, code, type);
10684 return
10685 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10686 code, type));
10690 return NULL_TREE;
10692 case MINUS_EXPR:
10693 /* Pointer simplifications for subtraction, simple reassociations. */
10694 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10696 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10697 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10698 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10700 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10701 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10702 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10703 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10704 return fold_build2_loc (loc, PLUS_EXPR, type,
10705 fold_build2_loc (loc, MINUS_EXPR, type,
10706 arg00, arg10),
10707 fold_build2_loc (loc, MINUS_EXPR, type,
10708 arg01, arg11));
10710 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10711 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10713 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10714 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10715 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10716 fold_convert_loc (loc, type, arg1));
10717 if (tmp)
10718 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10721 /* A - (-B) -> A + B */
10722 if (TREE_CODE (arg1) == NEGATE_EXPR)
10723 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10724 fold_convert_loc (loc, type,
10725 TREE_OPERAND (arg1, 0)));
10726 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10727 if (TREE_CODE (arg0) == NEGATE_EXPR
10728 && (FLOAT_TYPE_P (type)
10729 || INTEGRAL_TYPE_P (type))
10730 && negate_expr_p (arg1)
10731 && reorder_operands_p (arg0, arg1))
10732 return fold_build2_loc (loc, MINUS_EXPR, type,
10733 fold_convert_loc (loc, type,
10734 negate_expr (arg1)),
10735 fold_convert_loc (loc, type,
10736 TREE_OPERAND (arg0, 0)));
10737 /* Convert -A - 1 to ~A. */
10738 if (INTEGRAL_TYPE_P (type)
10739 && TREE_CODE (arg0) == NEGATE_EXPR
10740 && integer_onep (arg1)
10741 && !TYPE_OVERFLOW_TRAPS (type))
10742 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10743 fold_convert_loc (loc, type,
10744 TREE_OPERAND (arg0, 0)));
10746 /* Convert -1 - A to ~A. */
10747 if (INTEGRAL_TYPE_P (type)
10748 && integer_all_onesp (arg0))
10749 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10752 /* X - (X / CST) * CST is X % CST. */
10753 if (INTEGRAL_TYPE_P (type)
10754 && TREE_CODE (arg1) == MULT_EXPR
10755 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10756 && operand_equal_p (arg0,
10757 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10758 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10759 TREE_OPERAND (arg1, 1), 0))
10760 return
10761 fold_convert_loc (loc, type,
10762 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10763 arg0, TREE_OPERAND (arg1, 1)));
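
This is the usual remainder identity; a direct check (exact because C division truncates toward zero):

#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    /* X - (X / 7) * 7 is exactly X % 7, which the folder builds
       as a TRUNC_MOD_EXPR.  */
    assert (x - (x / 7) * 7 == x % 7);
  return 0;
}
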
10765 if (! FLOAT_TYPE_P (type))
10767 if (integer_zerop (arg0))
10768 return negate_expr (fold_convert_loc (loc, type, arg1));
10769 if (integer_zerop (arg1))
10770 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10772 /* Fold A - (A & B) into ~B & A. */
10773 if (!TREE_SIDE_EFFECTS (arg0)
10774 && TREE_CODE (arg1) == BIT_AND_EXPR)
10776 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10778 tree arg10 = fold_convert_loc (loc, type,
10779 TREE_OPERAND (arg1, 0));
10780 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10781 fold_build1_loc (loc, BIT_NOT_EXPR,
10782 type, arg10),
10783 fold_convert_loc (loc, type, arg0));
10785 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10787 tree arg11 = fold_convert_loc (loc,
10788 type, TREE_OPERAND (arg1, 1));
10789 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10790 fold_build1_loc (loc, BIT_NOT_EXPR,
10791 type, arg11),
10792 fold_convert_loc (loc, type, arg0));
10796 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10797 any power of 2 minus 1. */
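/* E.g. with B == 0x0f: A ^ 0x0f rewrites the low nibble as
   0x0f - (A & 0x0f) while leaving the high bits alone, so
   (A ^ B) - B equals (A & ~B) - (A & B). */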
10798 if (TREE_CODE (arg0) == BIT_AND_EXPR
10799 && TREE_CODE (arg1) == BIT_AND_EXPR
10800 && operand_equal_p (TREE_OPERAND (arg0, 0),
10801 TREE_OPERAND (arg1, 0), 0))
10803 tree mask0 = TREE_OPERAND (arg0, 1);
10804 tree mask1 = TREE_OPERAND (arg1, 1);
10805 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10807 if (operand_equal_p (tem, mask1, 0))
10809 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10810 TREE_OPERAND (arg0, 0), mask1);
10811 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10816 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10817 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10818 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10820 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10821 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10822 (-ARG1 + ARG0) reduces to -ARG1. */
10823 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10824 return negate_expr (fold_convert_loc (loc, type, arg1));
10826 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10827 __complex__ ( x, -y ). This is not the same for SNaNs or if
10828 signed zeros are involved. */
10829 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10830 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10831 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10833 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10834 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10835 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10836 bool arg0rz = false, arg0iz = false;
10837 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10838 || (arg0i && (arg0iz = real_zerop (arg0i))))
10840 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10841 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10842 if (arg0rz && arg1i && real_zerop (arg1i))
10844 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10845 arg1r ? arg1r
10846 : build1 (REALPART_EXPR, rtype, arg1));
10847 tree ip = arg0i ? arg0i
10848 : build1 (IMAGPART_EXPR, rtype, arg0);
10849 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10851 else if (arg0iz && arg1r && real_zerop (arg1r))
10853 tree rp = arg0r ? arg0r
10854 : build1 (REALPART_EXPR, rtype, arg0);
10855 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10856 arg1i ? arg1i
10857 : build1 (IMAGPART_EXPR, rtype, arg1));
10858 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10863 /* Fold &x - &x. This can happen from &x.foo - &x.
10864 This is unsafe for certain floats even in non-IEEE formats.
10865 In IEEE, it is unsafe because it does wrong for NaNs.
10866 Also note that operand_equal_p is always false if an operand
10867 is volatile. */
10869 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10870 && operand_equal_p (arg0, arg1, 0))
10871 return fold_convert_loc (loc, type, integer_zero_node);
10873 /* A - B -> A + (-B) if B is easily negatable. */
10874 if (negate_expr_p (arg1)
10875 && ((FLOAT_TYPE_P (type)
10876 /* Avoid this transformation if B is a positive REAL_CST. */
10877 && (TREE_CODE (arg1) != REAL_CST
10878 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10879 || INTEGRAL_TYPE_P (type)))
10880 return fold_build2_loc (loc, PLUS_EXPR, type,
10881 fold_convert_loc (loc, type, arg0),
10882 fold_convert_loc (loc, type,
10883 negate_expr (arg1)));
10885 /* Try folding difference of addresses. */
10887 HOST_WIDE_INT diff;
10889 if ((TREE_CODE (arg0) == ADDR_EXPR
10890 || TREE_CODE (arg1) == ADDR_EXPR)
10891 && ptr_difference_const (arg0, arg1, &diff))
10892 return build_int_cst_type (type, diff);
10895 /* Fold &a[i] - &a[j] to i-j. */
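/* At this level the pointer difference is in bytes, so the code below
   actually builds (i - j) scaled by the array's element size. */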
10896 if (TREE_CODE (arg0) == ADDR_EXPR
10897 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10898 && TREE_CODE (arg1) == ADDR_EXPR
10899 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10901 tree aref0 = TREE_OPERAND (arg0, 0);
10902 tree aref1 = TREE_OPERAND (arg1, 0);
10903 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10904 TREE_OPERAND (aref1, 0), 0))
10906 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10907 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10908 tree esz = array_ref_element_size (aref0);
10909 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10910 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10911 fold_convert_loc (loc, type, esz));
10916 if (FLOAT_TYPE_P (type)
10917 && flag_unsafe_math_optimizations
10918 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10919 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10920 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10921 return tem;
10923 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10924 same or the constant 1. Make sure the type is not saturating.
10925 fold_plusminus_mult_expr will re-associate. */
10926 if ((TREE_CODE (arg0) == MULT_EXPR
10927 || TREE_CODE (arg1) == MULT_EXPR)
10928 && !TYPE_SATURATING (type)
10929 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10931 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10932 if (tem)
10933 return tem;
10936 goto associate;
10938 case MULT_EXPR:
10939 /* (-A) * (-B) -> A * B */
10940 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10941 return fold_build2_loc (loc, MULT_EXPR, type,
10942 fold_convert_loc (loc, type,
10943 TREE_OPERAND (arg0, 0)),
10944 fold_convert_loc (loc, type,
10945 negate_expr (arg1)));
10946 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10947 return fold_build2_loc (loc, MULT_EXPR, type,
10948 fold_convert_loc (loc, type,
10949 negate_expr (arg0)),
10950 fold_convert_loc (loc, type,
10951 TREE_OPERAND (arg1, 0)));
10953 if (! FLOAT_TYPE_P (type))
10955 if (integer_zerop (arg1))
10956 return omit_one_operand_loc (loc, type, arg1, arg0);
10957 if (integer_onep (arg1))
10958 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10959 /* Transform x * -1 into -x. Make sure to do the negation
10960 on the original operand with conversions not stripped
10961 because we can only strip non-sign-changing conversions. */
10962 if (integer_all_onesp (arg1))
10963 return fold_convert_loc (loc, type, negate_expr (op0));
10964 /* Transform x * -C into -x * C if x is easily negatable. */
10965 if (TREE_CODE (arg1) == INTEGER_CST
10966 && tree_int_cst_sgn (arg1) == -1
10967 && negate_expr_p (arg0)
10968 && (tem = negate_expr (arg1)) != arg1
10969 && !TREE_OVERFLOW (tem))
10970 return fold_build2_loc (loc, MULT_EXPR, type,
10971 fold_convert_loc (loc, type,
10972 negate_expr (arg0)),
10973 tem);
10975 /* (a * (1 << b)) is (a << b) */
10976 if (TREE_CODE (arg1) == LSHIFT_EXPR
10977 && integer_onep (TREE_OPERAND (arg1, 0)))
10978 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10979 TREE_OPERAND (arg1, 1));
10980 if (TREE_CODE (arg0) == LSHIFT_EXPR
10981 && integer_onep (TREE_OPERAND (arg0, 0)))
10982 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10983 TREE_OPERAND (arg0, 1));
10985 /* (A + A) * C -> A * 2 * C */
10986 if (TREE_CODE (arg0) == PLUS_EXPR
10987 && TREE_CODE (arg1) == INTEGER_CST
10988 && operand_equal_p (TREE_OPERAND (arg0, 0),
10989 TREE_OPERAND (arg0, 1), 0))
10990 return fold_build2_loc (loc, MULT_EXPR, type,
10991 omit_one_operand_loc (loc, type,
10992 TREE_OPERAND (arg0, 0),
10993 TREE_OPERAND (arg0, 1)),
10994 fold_build2_loc (loc, MULT_EXPR, type,
10995 build_int_cst (type, 2), arg1));
10997 strict_overflow_p = false;
10998 if (TREE_CODE (arg1) == INTEGER_CST
10999 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11000 &strict_overflow_p)))
11002 if (strict_overflow_p)
11003 fold_overflow_warning (("assuming signed overflow does not "
11004 "occur when simplifying "
11005 "multiplication"),
11006 WARN_STRICT_OVERFLOW_MISC);
11007 return fold_convert_loc (loc, type, tem);
11010 /* Optimize z * conj(z) for integer complex numbers. */
11011 if (TREE_CODE (arg0) == CONJ_EXPR
11012 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11013 return fold_mult_zconjz (loc, type, arg1);
11014 if (TREE_CODE (arg1) == CONJ_EXPR
11015 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11016 return fold_mult_zconjz (loc, type, arg0);
11018 else
11020 /* Maybe fold x * 0 to 0. The expressions aren't the same
11021 when x is NaN, since x * 0 is also NaN. Nor are they the
11022 same in modes with signed zeros, since multiplying a
11023 negative value by 0 gives -0, not +0. */
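/* E.g. -3.0 * 0.0 is -0.0 under IEEE rules, so folding to +0.0 would
   be wrong when signed zeros are honored. */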
11024 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11025 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11026 && real_zerop (arg1))
11027 return omit_one_operand_loc (loc, type, arg1, arg0);
11028 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11029 Likewise for complex arithmetic with signed zeros. */
11030 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11031 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11032 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11033 && real_onep (arg1))
11034 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11036 /* Transform x * -1.0 into -x. */
11037 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11038 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11039 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11040 && real_minus_onep (arg1))
11041 return fold_convert_loc (loc, type, negate_expr (arg0));
11043 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11044 the result for floating point types due to rounding so it is applied
11045 only if -fassociative-math was specified. */
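/* E.g. (1.0/x) * 3.0 becomes 3.0/x; the single division may round
   differently from the divide-then-multiply sequence. */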
11046 if (flag_associative_math
11047 && TREE_CODE (arg0) == RDIV_EXPR
11048 && TREE_CODE (arg1) == REAL_CST
11049 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11051 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11052 arg1, 0);
11053 if (tem)
11054 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11055 TREE_OPERAND (arg0, 1));
11058 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11059 if (operand_equal_p (arg0, arg1, 0))
11061 tree tem = fold_strip_sign_ops (arg0);
11062 if (tem != NULL_TREE)
11064 tem = fold_convert_loc (loc, type, tem);
11065 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11069 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11070 This is not the same for NaNs or if signed zeros are
11071 involved. */
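/* E.g. (a + bi) * i == -b + ai: multiplying by the imaginary unit
   just swaps the parts and negates one of them. */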
11072 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11073 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11074 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11075 && TREE_CODE (arg1) == COMPLEX_CST
11076 && real_zerop (TREE_REALPART (arg1)))
11078 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11079 if (real_onep (TREE_IMAGPART (arg1)))
11080 return
11081 fold_build2_loc (loc, COMPLEX_EXPR, type,
11082 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11083 rtype, arg0)),
11084 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11085 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11086 return
11087 fold_build2_loc (loc, COMPLEX_EXPR, type,
11088 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11089 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11090 rtype, arg0)));
11093 /* Optimize z * conj(z) for floating point complex numbers.
11094 Guarded by flag_unsafe_math_optimizations as non-finite
11095 imaginary components don't produce scalar results. */
11096 if (flag_unsafe_math_optimizations
11097 && TREE_CODE (arg0) == CONJ_EXPR
11098 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11099 return fold_mult_zconjz (loc, type, arg1);
11100 if (flag_unsafe_math_optimizations
11101 && TREE_CODE (arg1) == CONJ_EXPR
11102 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11103 return fold_mult_zconjz (loc, type, arg0);
11105 if (flag_unsafe_math_optimizations)
11107 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11108 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11110 /* Optimizations of root(...)*root(...). */
11111 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11113 tree rootfn, arg;
11114 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11115 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11117 /* Optimize sqrt(x)*sqrt(x) as x. */
11118 if (BUILTIN_SQRT_P (fcode0)
11119 && operand_equal_p (arg00, arg10, 0)
11120 && ! HONOR_SNANS (TYPE_MODE (type)))
11121 return arg00;
11123 /* Optimize root(x)*root(y) as root(x*y). */
11124 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11125 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11126 return build_call_expr_loc (loc, rootfn, 1, arg);
11129 /* Optimize expN(x)*expN(y) as expN(x+y). */
11130 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11132 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11133 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11134 CALL_EXPR_ARG (arg0, 0),
11135 CALL_EXPR_ARG (arg1, 0));
11136 return build_call_expr_loc (loc, expfn, 1, arg);
11139 /* Optimizations of pow(...)*pow(...). */
11140 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11141 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11142 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11144 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11145 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11146 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11147 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11149 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11150 if (operand_equal_p (arg01, arg11, 0))
11152 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11153 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11154 arg00, arg10);
11155 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11158 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11159 if (operand_equal_p (arg00, arg10, 0))
11161 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11162 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11163 arg01, arg11);
11164 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11168 /* Optimize tan(x)*cos(x) as sin(x). */
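/* Mathematically exact wherever tan is defined, but the floating-point
   results can differ (tan may overflow near odd multiples of pi/2),
   hence the flag_unsafe_math_optimizations guard. */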
11169 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11170 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11171 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11172 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11173 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11174 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11175 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11176 CALL_EXPR_ARG (arg1, 0), 0))
11178 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11180 if (sinfn != NULL_TREE)
11181 return build_call_expr_loc (loc, sinfn, 1,
11182 CALL_EXPR_ARG (arg0, 0));
11185 /* Optimize x*pow(x,c) as pow(x,c+1). */
11186 if (fcode1 == BUILT_IN_POW
11187 || fcode1 == BUILT_IN_POWF
11188 || fcode1 == BUILT_IN_POWL)
11190 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11191 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11192 if (TREE_CODE (arg11) == REAL_CST
11193 && !TREE_OVERFLOW (arg11)
11194 && operand_equal_p (arg0, arg10, 0))
11196 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11197 REAL_VALUE_TYPE c;
11198 tree arg;
11200 c = TREE_REAL_CST (arg11);
11201 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11202 arg = build_real (type, c);
11203 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11207 /* Optimize pow(x,c)*x as pow(x,c+1). */
11208 if (fcode0 == BUILT_IN_POW
11209 || fcode0 == BUILT_IN_POWF
11210 || fcode0 == BUILT_IN_POWL)
11212 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11213 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11214 if (TREE_CODE (arg01) == REAL_CST
11215 && !TREE_OVERFLOW (arg01)
11216 && operand_equal_p (arg1, arg00, 0))
11218 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11219 REAL_VALUE_TYPE c;
11220 tree arg;
11222 c = TREE_REAL_CST (arg01);
11223 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11224 arg = build_real (type, c);
11225 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11229 /* Canonicalize x*x as pow(x,2.0) so the pow foldings above can
11230 see it; expansion turns pow(x,2.0) back into x*x, so nothing is lost. */
11230 if (optimize_function_for_speed_p (cfun)
11231 && operand_equal_p (arg0, arg1, 0))
11233 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11235 if (powfn)
11237 tree arg = build_real (type, dconst2);
11238 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11243 goto associate;
11245 case BIT_IOR_EXPR:
11246 bit_ior:
11247 if (integer_all_onesp (arg1))
11248 return omit_one_operand_loc (loc, type, arg1, arg0);
11249 if (integer_zerop (arg1))
11250 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11251 if (operand_equal_p (arg0, arg1, 0))
11252 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11254 /* ~X | X is -1. */
11255 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11256 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11258 t1 = fold_convert_loc (loc, type, integer_zero_node);
11259 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11260 return omit_one_operand_loc (loc, type, t1, arg1);
11263 /* X | ~X is -1. */
11264 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11265 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11267 t1 = fold_convert_loc (loc, type, integer_zero_node);
11268 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11269 return omit_one_operand_loc (loc, type, t1, arg0);
11272 /* Canonicalize (X & C1) | C2. */
11273 if (TREE_CODE (arg0) == BIT_AND_EXPR
11274 && TREE_CODE (arg1) == INTEGER_CST
11275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11277 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
11278 int width = TYPE_PRECISION (type), w;
11279 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
11280 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11281 hi2 = TREE_INT_CST_HIGH (arg1);
11282 lo2 = TREE_INT_CST_LOW (arg1);
11284 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11285 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
11286 return omit_one_operand_loc (loc, type, arg1,
11287 TREE_OPERAND (arg0, 0));
11289 if (width > HOST_BITS_PER_WIDE_INT)
11291 mhi = (unsigned HOST_WIDE_INT) -1
11292 >> (2 * HOST_BITS_PER_WIDE_INT - width);
11293 mlo = -1;
11295 else
11297 mhi = 0;
11298 mlo = (unsigned HOST_WIDE_INT) -1
11299 >> (HOST_BITS_PER_WIDE_INT - width);
11302 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11303 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
11304 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11305 TREE_OPERAND (arg0, 0), arg1);
11307 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11308 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11309 mode which allows further optimizations. */
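/* E.g. (X & 0xff) | 0x0f becomes (X & 0xf0) | 0x0f: the bits of C1
   that C2 already provides are dropped from the AND mask. */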
11310 hi1 &= mhi;
11311 lo1 &= mlo;
11312 hi2 &= mhi;
11313 lo2 &= mlo;
11314 hi3 = hi1 & ~hi2;
11315 lo3 = lo1 & ~lo2;
11316 for (w = BITS_PER_UNIT;
11317 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11318 w <<= 1)
11320 unsigned HOST_WIDE_INT mask
11321 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11322 if (((lo1 | lo2) & mask) == mask
11323 && (lo1 & ~mask) == 0 && hi1 == 0)
11325 hi3 = 0;
11326 lo3 = mask;
11327 break;
11330 if (hi3 != hi1 || lo3 != lo1)
11331 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11332 fold_build2_loc (loc, BIT_AND_EXPR, type,
11333 TREE_OPERAND (arg0, 0),
11334 build_int_cst_wide (type,
11335 lo3, hi3)),
11336 arg1);
11339 /* (X & Y) | Y is (X, Y). */
11340 if (TREE_CODE (arg0) == BIT_AND_EXPR
11341 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11342 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11343 /* (X & Y) | X is (Y, X). */
11344 if (TREE_CODE (arg0) == BIT_AND_EXPR
11345 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11346 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11347 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11348 /* X | (X & Y) is (Y, X). */
11349 if (TREE_CODE (arg1) == BIT_AND_EXPR
11350 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11351 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11352 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11353 /* X | (Y & X) is (Y, X). */
11354 if (TREE_CODE (arg1) == BIT_AND_EXPR
11355 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11356 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11357 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11359 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11360 if (t1 != NULL_TREE)
11361 return t1;
11363 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11365 This results in more efficient code for machines without a NAND
11366 instruction. Combine will canonicalize to the first form
11367 which will allow use of NAND instructions provided by the
11368 backend if they exist. */
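/* This is De Morgan's law: ~a | ~b == ~(a & b). */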
11369 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11370 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11372 return
11373 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11374 build2 (BIT_AND_EXPR, type,
11375 fold_convert_loc (loc, type,
11376 TREE_OPERAND (arg0, 0)),
11377 fold_convert_loc (loc, type,
11378 TREE_OPERAND (arg1, 0))));
11381 /* See if this can be simplified into a rotate first. If that
11382 is unsuccessful continue in the association code. */
11383 goto bit_rotate;
11385 case BIT_XOR_EXPR:
11386 if (integer_zerop (arg1))
11387 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11388 if (integer_all_onesp (arg1))
11389 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11390 if (operand_equal_p (arg0, arg1, 0))
11391 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11393 /* ~X ^ X is -1. */
11394 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11395 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11397 t1 = fold_convert_loc (loc, type, integer_zero_node);
11398 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11399 return omit_one_operand_loc (loc, type, t1, arg1);
11402 /* X ^ ~X is -1. */
11403 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11404 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11406 t1 = fold_convert_loc (loc, type, integer_zero_node);
11407 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11408 return omit_one_operand_loc (loc, type, t1, arg0);
11411 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11412 with a constant, and the two constants have no bits in common,
11413 we should treat this as a BIT_IOR_EXPR since this may produce more
11414 simplifications. */
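/* E.g. (a & 0xf0) ^ (b & 0x0f): the masked values can never have a
   common set bit, so XOR and IOR compute the same result. */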
11415 if (TREE_CODE (arg0) == BIT_AND_EXPR
11416 && TREE_CODE (arg1) == BIT_AND_EXPR
11417 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11418 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11419 && integer_zerop (const_binop (BIT_AND_EXPR,
11420 TREE_OPERAND (arg0, 1),
11421 TREE_OPERAND (arg1, 1), 0)))
11423 code = BIT_IOR_EXPR;
11424 goto bit_ior;
11427 /* (X | Y) ^ X -> Y & ~X. */
11428 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11429 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11431 tree t2 = TREE_OPERAND (arg0, 1);
11432 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11433 arg1);
11434 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11435 fold_convert_loc (loc, type, t2),
11436 fold_convert_loc (loc, type, t1));
11437 return t1;
11440 /* (Y | X) ^ X -> Y & ~X. */
11441 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11442 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11444 tree t2 = TREE_OPERAND (arg0, 0);
11445 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11446 arg1);
11447 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11448 fold_convert_loc (loc, type, t2),
11449 fold_convert_loc (loc, type, t1));
11450 return t1;
11453 /* X ^ (X | Y) -> Y & ~X. */
11454 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11455 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11457 tree t2 = TREE_OPERAND (arg1, 1);
11458 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11459 arg0);
11460 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11461 fold_convert_loc (loc, type, t2),
11462 fold_convert_loc (loc, type, t1));
11463 return t1;
11466 /* X ^ (Y | X) -> Y & ~X. */
11467 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11468 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11470 tree t2 = TREE_OPERAND (arg1, 0);
11471 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11472 arg0);
11473 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11474 fold_convert_loc (loc, type, t2),
11475 fold_convert_loc (loc, type, t1));
11476 return t1;
11479 /* Convert ~X ^ ~Y to X ^ Y. */
11480 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11481 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11482 return fold_build2_loc (loc, code, type,
11483 fold_convert_loc (loc, type,
11484 TREE_OPERAND (arg0, 0)),
11485 fold_convert_loc (loc, type,
11486 TREE_OPERAND (arg1, 0)));
11488 /* Convert ~X ^ C to X ^ ~C. */
11489 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11490 && TREE_CODE (arg1) == INTEGER_CST)
11491 return fold_build2_loc (loc, code, type,
11492 fold_convert_loc (loc, type,
11493 TREE_OPERAND (arg0, 0)),
11494 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11496 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11497 if (TREE_CODE (arg0) == BIT_AND_EXPR
11498 && integer_onep (TREE_OPERAND (arg0, 1))
11499 && integer_onep (arg1))
11500 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11501 build_int_cst (TREE_TYPE (arg0), 0));
11503 /* Fold (X & Y) ^ Y as ~X & Y. */
11504 if (TREE_CODE (arg0) == BIT_AND_EXPR
11505 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11507 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11508 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11509 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11510 fold_convert_loc (loc, type, arg1));
11512 /* Fold (X & Y) ^ X as ~Y & X. */
11513 if (TREE_CODE (arg0) == BIT_AND_EXPR
11514 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11515 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11517 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11518 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11519 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11520 fold_convert_loc (loc, type, arg1));
11522 /* Fold X ^ (X & Y) as X & ~Y. */
11523 if (TREE_CODE (arg1) == BIT_AND_EXPR
11524 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11526 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11527 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11528 fold_convert_loc (loc, type, arg0),
11529 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11531 /* Fold X ^ (Y & X) as ~Y & X. */
11532 if (TREE_CODE (arg1) == BIT_AND_EXPR
11533 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11534 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11536 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11537 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11538 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11539 fold_convert_loc (loc, type, arg0));
11542 /* See if this can be simplified into a rotate first. If that
11543 is unsuccessful continue in the association code. */
11544 goto bit_rotate;
11546 case BIT_AND_EXPR:
11547 if (integer_all_onesp (arg1))
11548 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11549 if (integer_zerop (arg1))
11550 return omit_one_operand_loc (loc, type, arg1, arg0);
11551 if (operand_equal_p (arg0, arg1, 0))
11552 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11554 /* ~X & X is always zero. */
11555 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11556 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11557 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11559 /* X & ~X is always zero. */
11560 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11561 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11562 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11564 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11565 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11566 && TREE_CODE (arg1) == INTEGER_CST
11567 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11569 tree tmp1 = fold_convert_loc (loc, type, arg1);
11570 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11571 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11572 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11573 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11574 return
11575 fold_convert_loc (loc, type,
11576 fold_build2_loc (loc, BIT_IOR_EXPR,
11577 type, tmp2, tmp3));
11580 /* (X | Y) & Y is (X, Y). */
11581 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11582 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11583 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11584 /* (X | Y) & X is (Y, X). */
11585 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11586 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11587 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11588 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11589 /* X & (X | Y) is (Y, X). */
11590 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11591 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11592 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11593 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11594 /* X & (Y | X) is (Y, X). */
11595 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11596 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11597 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11598 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11600 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11601 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11602 && integer_onep (TREE_OPERAND (arg0, 1))
11603 && integer_onep (arg1))
11605 tem = TREE_OPERAND (arg0, 0);
11606 return fold_build2_loc (loc, EQ_EXPR, type,
11607 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11608 build_int_cst (TREE_TYPE (tem), 1)),
11609 build_int_cst (TREE_TYPE (tem), 0));
11611 /* Fold ~X & 1 as (X & 1) == 0. */
11612 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11613 && integer_onep (arg1))
11615 tem = TREE_OPERAND (arg0, 0);
11616 return fold_build2_loc (loc, EQ_EXPR, type,
11617 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11618 build_int_cst (TREE_TYPE (tem), 1)),
11619 build_int_cst (TREE_TYPE (tem), 0));
11622 /* Fold (X ^ Y) & Y as ~X & Y. */
11623 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11624 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11626 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11627 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11628 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11629 fold_convert_loc (loc, type, arg1));
11631 /* Fold (X ^ Y) & X as ~Y & X. */
11632 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11633 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11634 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11636 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11637 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11638 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11639 fold_convert_loc (loc, type, arg1));
11641 /* Fold X & (X ^ Y) as X & ~Y. */
11642 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11643 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11645 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11646 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11647 fold_convert_loc (loc, type, arg0),
11648 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11650 /* Fold X & (Y ^ X) as ~Y & X. */
11651 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11652 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11653 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11655 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11656 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11657 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11658 fold_convert_loc (loc, type, arg0));
11661 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11662 if (t1 != NULL_TREE)
11663 return t1;
11664 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11665 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11666 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11668 unsigned int prec
11669 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11671 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11672 && (~TREE_INT_CST_LOW (arg1)
11673 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11674 return
11675 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11678 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11680 This results in more efficient code for machines without a NOR
11681 instruction. Combine will canonicalize to the first form
11682 which will allow use of NOR instructions provided by the
11683 backend if they exist. */
11684 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11685 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11687 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11688 build2 (BIT_IOR_EXPR, type,
11689 fold_convert_loc (loc, type,
11690 TREE_OPERAND (arg0, 0)),
11691 fold_convert_loc (loc, type,
11692 TREE_OPERAND (arg1, 0))));
11695 /* If arg0 is derived from the address of an object or function, we may
11696 be able to fold this expression using the object or function's
11697 alignment. */
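/* E.g. if arg0 is the address of a 16-byte-aligned object, the
   modulus is 16 with residue 0, so arg0 & 15 folds to the constant 0. */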
11698 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11700 unsigned HOST_WIDE_INT modulus, residue;
11701 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11703 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11704 integer_onep (arg1));
11706 /* This works because modulus is a power of 2. If this weren't the
11707 case, we'd have to replace it by its greatest power-of-2
11708 divisor: modulus & -modulus. */
11709 if (low < modulus)
11710 return build_int_cst (type, residue & low);
11713 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11714 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11715 if the new mask might be further optimized. */
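/* E.g. for a 32-bit unsigned X, (X >> 24) & 0xff: the shift already
   zeroes the top 24 bits, so the mask widens to all-ones, after which
   the BIT_AND_EXPR folds away entirely. */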
11716 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11717 || TREE_CODE (arg0) == RSHIFT_EXPR)
11718 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11719 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11720 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11721 < TYPE_PRECISION (TREE_TYPE (arg0))
11722 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11723 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11725 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11726 unsigned HOST_WIDE_INT mask
11727 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11728 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11729 tree shift_type = TREE_TYPE (arg0);
11731 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11732 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11733 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11734 && TYPE_PRECISION (TREE_TYPE (arg0))
11735 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11737 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11738 tree arg00 = TREE_OPERAND (arg0, 0);
11739 /* See if more bits can be proven as zero because of
11740 zero extension. */
11741 if (TREE_CODE (arg00) == NOP_EXPR
11742 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11744 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11745 if (TYPE_PRECISION (inner_type)
11746 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11747 && TYPE_PRECISION (inner_type) < prec)
11749 prec = TYPE_PRECISION (inner_type);
11750 /* See if we can shorten the right shift. */
11751 if (shiftc < prec)
11752 shift_type = inner_type;
11755 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11756 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11757 zerobits <<= prec - shiftc;
11758 /* For an arithmetic shift, if the sign bit could be set, zerobits
11759 can actually contain sign bits, so no transformation is
11760 possible, unless MASK masks them all away. In that
11761 case the shift needs to be converted into a logical shift. */
11762 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11763 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11765 if ((mask & zerobits) == 0)
11766 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11767 else
11768 zerobits = 0;
11772 /* ((X << 16) & 0xff00) is (X, 0). */
11773 if ((mask & zerobits) == mask)
11774 return omit_one_operand_loc (loc, type,
11775 build_int_cst (type, 0), arg0);
11777 newmask = mask | zerobits;
11778 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11780 unsigned int prec;
11782 /* Only do the transformation if NEWMASK is some integer
11783 mode's mask. */
11784 for (prec = BITS_PER_UNIT;
11785 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11786 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11787 break;
11788 if (prec < HOST_BITS_PER_WIDE_INT
11789 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11791 tree newmaskt;
11793 if (shift_type != TREE_TYPE (arg0))
11795 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11796 fold_convert_loc (loc, shift_type,
11797 TREE_OPERAND (arg0, 0)),
11798 TREE_OPERAND (arg0, 1));
11799 tem = fold_convert_loc (loc, type, tem);
11801 else
11802 tem = op0;
11803 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11804 if (!tree_int_cst_equal (newmaskt, arg1))
11805 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11810 goto associate;
11812 case RDIV_EXPR:
11813 /* Don't touch a floating-point divide by zero unless the mode
11814 of the constant can represent infinity. */
11815 if (TREE_CODE (arg1) == REAL_CST
11816 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11817 && real_zerop (arg1))
11818 return NULL_TREE;
11820 /* Optimize A / A to 1.0 if we don't care about
11821 NaNs or Infinities. Skip the transformation
11822 for non-real operands. */
11823 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11824 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11825 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11826 && operand_equal_p (arg0, arg1, 0))
11828 tree r = build_real (TREE_TYPE (arg0), dconst1);
11830 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11833 /* The complex version of the above A / A optimization. */
11834 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11835 && operand_equal_p (arg0, arg1, 0))
11837 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11838 if (! HONOR_NANS (TYPE_MODE (elem_type))
11839 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11841 tree r = build_real (elem_type, dconst1);
11842 /* omit_two_operands will call fold_convert for us. */
11843 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11847 /* (-A) / (-B) -> A / B */
11848 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11849 return fold_build2_loc (loc, RDIV_EXPR, type,
11850 TREE_OPERAND (arg0, 0),
11851 negate_expr (arg1));
11852 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11853 return fold_build2_loc (loc, RDIV_EXPR, type,
11854 negate_expr (arg0),
11855 TREE_OPERAND (arg1, 0));
11857 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11858 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11859 && real_onep (arg1))
11860 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11862 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11863 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11864 && real_minus_onep (arg1))
11865 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11866 negate_expr (arg0)));
11868 /* If ARG1 is a constant, we can convert this to a multiply by the
11869 reciprocal. This does not have the same rounding properties,
11870 so only do this if -freciprocal-math. We can actually
11871 always safely do it if ARG1 is a power of two, but it's hard to
11872 tell if it is or not in a portable manner. */
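/* E.g. x / 2.0 becomes x * 0.5 exactly, but x / 3.0 -> x * (1/3.0)
   can round differently, which is why -freciprocal-math is required;
   the exact_real_inverse path just below handles the provably exact
   cases. */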
11873 if (TREE_CODE (arg1) == REAL_CST)
11875 if (flag_reciprocal_math
11876 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11877 arg1, 0)))
11878 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11879 /* Find the reciprocal if optimizing and the result is exact. */
11880 if (optimize)
11882 REAL_VALUE_TYPE r;
11883 r = TREE_REAL_CST (arg1);
11884 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11886 tem = build_real (type, r);
11887 return fold_build2_loc (loc, MULT_EXPR, type,
11888 fold_convert_loc (loc, type, arg0), tem);
11892 /* Convert A/B/C to A/(B*C). */
11893 if (flag_reciprocal_math
11894 && TREE_CODE (arg0) == RDIV_EXPR)
11895 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11896 fold_build2_loc (loc, MULT_EXPR, type,
11897 TREE_OPERAND (arg0, 1), arg1));
11899 /* Convert A/(B/C) to (A/B)*C. */
11900 if (flag_reciprocal_math
11901 && TREE_CODE (arg1) == RDIV_EXPR)
11902 return fold_build2_loc (loc, MULT_EXPR, type,
11903 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11904 TREE_OPERAND (arg1, 0)),
11905 TREE_OPERAND (arg1, 1));
11907 /* Convert C1/(X*C2) into (C1/C2)/X. */
11908 if (flag_reciprocal_math
11909 && TREE_CODE (arg1) == MULT_EXPR
11910 && TREE_CODE (arg0) == REAL_CST
11911 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11913 tree tem = const_binop (RDIV_EXPR, arg0,
11914 TREE_OPERAND (arg1, 1), 0);
11915 if (tem)
11916 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11917 TREE_OPERAND (arg1, 0));
11920 if (flag_unsafe_math_optimizations)
11922 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11923 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11925 /* Optimize sin(x)/cos(x) as tan(x). */
11926 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11927 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11928 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11929 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11930 CALL_EXPR_ARG (arg1, 0), 0))
11932 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11934 if (tanfn != NULL_TREE)
11935 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11938 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11939 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11940 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11941 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11942 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11943 CALL_EXPR_ARG (arg1, 0), 0))
11945 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11947 if (tanfn != NULL_TREE)
11949 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11950 CALL_EXPR_ARG (arg0, 0));
11951 return fold_build2_loc (loc, RDIV_EXPR, type,
11952 build_real (type, dconst1), tmp);
11956 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11957 NaNs or Infinities. */
11958 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11959 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11960 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11962 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11963 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11965 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11966 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11967 && operand_equal_p (arg00, arg01, 0))
11969 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11971 if (cosfn != NULL_TREE)
11972 return build_call_expr_loc (loc, cosfn, 1, arg00);
11976 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11977 NaNs or Infinities. */
11978 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11979 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11980 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11982 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11983 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11985 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11986 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11987 && operand_equal_p (arg00, arg01, 0))
11989 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11991 if (cosfn != NULL_TREE)
11993 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11994 return fold_build2_loc (loc, RDIV_EXPR, type,
11995 build_real (type, dconst1),
11996 tmp);
12001 /* Optimize pow(x,c)/x as pow(x,c-1). */
12002 if (fcode0 == BUILT_IN_POW
12003 || fcode0 == BUILT_IN_POWF
12004 || fcode0 == BUILT_IN_POWL)
12006 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12007 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12008 if (TREE_CODE (arg01) == REAL_CST
12009 && !TREE_OVERFLOW (arg01)
12010 && operand_equal_p (arg1, arg00, 0))
12012 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12013 REAL_VALUE_TYPE c;
12014 tree arg;
12016 c = TREE_REAL_CST (arg01);
12017 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12018 arg = build_real (type, c);
12019 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12023 /* Optimize a/root(b/c) into a*root(c/b). */
12024 if (BUILTIN_ROOT_P (fcode1))
12026 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12028 if (TREE_CODE (rootarg) == RDIV_EXPR)
12030 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12031 tree b = TREE_OPERAND (rootarg, 0);
12032 tree c = TREE_OPERAND (rootarg, 1);
12034 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12036 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12037 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12041 /* Optimize x/expN(y) into x*expN(-y). */
12042 if (BUILTIN_EXPONENT_P (fcode1))
12044 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12045 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12046 arg1 = build_call_expr_loc (loc,
12047 expfn, 1,
12048 fold_convert_loc (loc, type, arg));
12049 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12052 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12053 if (fcode1 == BUILT_IN_POW
12054 || fcode1 == BUILT_IN_POWF
12055 || fcode1 == BUILT_IN_POWL)
12057 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12058 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12059 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12060 tree neg11 = fold_convert_loc (loc, type,
12061 negate_expr (arg11));
12062 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12063 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12066 return NULL_TREE;
12068 case TRUNC_DIV_EXPR:
12069 case FLOOR_DIV_EXPR:
12070 /* Simplify A / (B << N) where A and B are positive and B is
12071 a power of 2, to A >> (N + log2(B)). */
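/* E.g. a / (4 << n) becomes a >> (n + 2) when a is unsigned or known
   nonnegative. */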
12072 strict_overflow_p = false;
12073 if (TREE_CODE (arg1) == LSHIFT_EXPR
12074 && (TYPE_UNSIGNED (type)
12075 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12077 tree sval = TREE_OPERAND (arg1, 0);
12078 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12080 tree sh_cnt = TREE_OPERAND (arg1, 1);
12081 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12083 if (strict_overflow_p)
12084 fold_overflow_warning (("assuming signed overflow does not "
12085 "occur when simplifying A / (B << N)"),
12086 WARN_STRICT_OVERFLOW_MISC);
12088 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12089 sh_cnt, build_int_cst (NULL_TREE, pow2));
12090 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12091 fold_convert_loc (loc, type, arg0), sh_cnt);
12095 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12096 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12097 if (INTEGRAL_TYPE_P (type)
12098 && TYPE_UNSIGNED (type)
12099 && code == FLOOR_DIV_EXPR)
12100 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12102 /* Fall thru */
12104 case ROUND_DIV_EXPR:
12105 case CEIL_DIV_EXPR:
12106 case EXACT_DIV_EXPR:
12107 if (integer_onep (arg1))
12108 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12109 if (integer_zerop (arg1))
12110 return NULL_TREE;
12111 /* X / -1 is -X. */
12112 if (!TYPE_UNSIGNED (type)
12113 && TREE_CODE (arg1) == INTEGER_CST
12114 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12115 && TREE_INT_CST_HIGH (arg1) == -1)
12116 return fold_convert_loc (loc, type, negate_expr (arg0));
12118 /* Convert -A / -B to A / B when the type is signed and overflow is
12119 undefined. */
12120 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12121 && TREE_CODE (arg0) == NEGATE_EXPR
12122 && negate_expr_p (arg1))
12124 if (INTEGRAL_TYPE_P (type))
12125 fold_overflow_warning (("assuming signed overflow does not occur "
12126 "when distributing negation across "
12127 "division"),
12128 WARN_STRICT_OVERFLOW_MISC);
12129 return fold_build2_loc (loc, code, type,
12130 fold_convert_loc (loc, type,
12131 TREE_OPERAND (arg0, 0)),
12132 fold_convert_loc (loc, type,
12133 negate_expr (arg1)));
12135 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12136 && TREE_CODE (arg1) == NEGATE_EXPR
12137 && negate_expr_p (arg0))
12139 if (INTEGRAL_TYPE_P (type))
12140 fold_overflow_warning (("assuming signed overflow does not occur "
12141 "when distributing negation across "
12142 "division"),
12143 WARN_STRICT_OVERFLOW_MISC);
12144 return fold_build2_loc (loc, code, type,
12145 fold_convert_loc (loc, type,
12146 negate_expr (arg0)),
12147 fold_convert_loc (loc, type,
12148 TREE_OPERAND (arg1, 0)));
12151 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12152 operation, EXACT_DIV_EXPR.
12154 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12155 At one time others generated faster code, but it's not clear whether they
12156 still do after the last round of changes to the DIV code in expmed.c. */
12157 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12158 && multiple_of_p (type, arg0, arg1))
12159 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12161 strict_overflow_p = false;
12162 if (TREE_CODE (arg1) == INTEGER_CST
12163 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12164 &strict_overflow_p)))
12166 if (strict_overflow_p)
12167 fold_overflow_warning (("assuming signed overflow does not occur "
12168 "when simplifying division"),
12169 WARN_STRICT_OVERFLOW_MISC);
12170 return fold_convert_loc (loc, type, tem);
12173 return NULL_TREE;
12175 case CEIL_MOD_EXPR:
12176 case FLOOR_MOD_EXPR:
12177 case ROUND_MOD_EXPR:
12178 case TRUNC_MOD_EXPR:
12179 /* X % 1 is always zero, but be sure to preserve any side
12180 effects in X. */
12181 if (integer_onep (arg1))
12182 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12184 /* X % 0, return X % 0 unchanged so that we can get the
12185 proper warnings and errors. */
12186 if (integer_zerop (arg1))
12187 return NULL_TREE;
12189 /* 0 % X is always zero, but be sure to preserve any side
12190 effects in X. Place this after checking for X == 0. */
12191 if (integer_zerop (arg0))
12192 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12194 /* X % -1 is zero. */
12195 if (!TYPE_UNSIGNED (type)
12196 && TREE_CODE (arg1) == INTEGER_CST
12197 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12198 && TREE_INT_CST_HIGH (arg1) == -1)
12199 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12201 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12202 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12203 strict_overflow_p = false;
12204 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12205 && (TYPE_UNSIGNED (type)
12206 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12208 tree c = arg1;
12209 /* Also optimize A % (C << N) where C is a power of 2,
12210 to A & ((C << N) - 1). */
12211 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12212 c = TREE_OPERAND (arg1, 0);
12214 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12216 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12217 build_int_cst (TREE_TYPE (arg1), 1));
12218 if (strict_overflow_p)
12219 fold_overflow_warning (("assuming signed overflow does not "
12220 "occur when simplifying "
12221 "X % (power of two)"),
12222 WARN_STRICT_OVERFLOW_MISC);
12223 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12224 fold_convert_loc (loc, type, arg0),
12225 fold_convert_loc (loc, type, mask));
12229 /* X % -C is the same as X % C. */
12230 if (code == TRUNC_MOD_EXPR
12231 && !TYPE_UNSIGNED (type)
12232 && TREE_CODE (arg1) == INTEGER_CST
12233 && !TREE_OVERFLOW (arg1)
12234 && TREE_INT_CST_HIGH (arg1) < 0
12235 && !TYPE_OVERFLOW_TRAPS (type)
12236 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12237 && !sign_bit_p (arg1, arg1))
12238 return fold_build2_loc (loc, code, type,
12239 fold_convert_loc (loc, type, arg0),
12240 fold_convert_loc (loc, type,
12241 negate_expr (arg1)));
12243 /* X % -Y is the same as X % Y. */
12244 if (code == TRUNC_MOD_EXPR
12245 && !TYPE_UNSIGNED (type)
12246 && TREE_CODE (arg1) == NEGATE_EXPR
12247 && !TYPE_OVERFLOW_TRAPS (type))
12248 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12249 fold_convert_loc (loc, type,
12250 TREE_OPERAND (arg1, 0)));
12252 if (TREE_CODE (arg1) == INTEGER_CST
12253 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12254 &strict_overflow_p)))
12256 if (strict_overflow_p)
12257 fold_overflow_warning (("assuming signed overflow does not occur "
12258 "when simplifying modulus"),
12259 WARN_STRICT_OVERFLOW_MISC);
12260 return fold_convert_loc (loc, type, tem);
12263 return NULL_TREE;
12265 case LROTATE_EXPR:
12266 case RROTATE_EXPR:
12267 if (integer_all_onesp (arg0))
12268 return omit_one_operand_loc (loc, type, arg0, arg1);
12269 goto shift;
12271 case RSHIFT_EXPR:
12272 /* Optimize -1 >> x for arithmetic right shifts. */
12273 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12274 && tree_expr_nonnegative_p (arg1))
12275 return omit_one_operand_loc (loc, type, arg0, arg1);
12276 /* ... fall through ... */
12278 case LSHIFT_EXPR:
12279 shift:
12280 if (integer_zerop (arg1))
12281 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12282 if (integer_zerop (arg0))
12283 return omit_one_operand_loc (loc, type, arg0, arg1);
12285 /* Since a negative shift count is not well-defined,
12286 don't try to compute it in the compiler. */
12287 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12288 return NULL_TREE;
12290 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
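/* E.g. (x << 3) << 5 becomes x << 8; the code below also handles the
   combined count reaching or exceeding the type precision. */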
12291 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12292 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12293 && host_integerp (TREE_OPERAND (arg0, 1), false)
12294 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12296 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12297 + TREE_INT_CST_LOW (arg1));
12299 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12300 being well defined. */
12301 if (low >= TYPE_PRECISION (type))
12303 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12304 low = low % TYPE_PRECISION (type);
12305 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12306 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12307 TREE_OPERAND (arg0, 0));
12308 else
12309 low = TYPE_PRECISION (type) - 1;
12312 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12313 build_int_cst (type, low));
12316 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12317 into x & ((unsigned)-1 >> c) for unsigned types. */
12318 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12319 || (TYPE_UNSIGNED (type)
12320 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12321 && host_integerp (arg1, false)
12322 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12323 && host_integerp (TREE_OPERAND (arg0, 1), false)
12324 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12326 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12327 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12328 tree lshift;
12329 tree arg00;
12331 if (low0 == low1)
12333 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12335 lshift = build_int_cst (type, -1);
12336 lshift = int_const_binop (code, lshift, arg1, 0);
12338 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12342 /* Rewrite an LROTATE_EXPR by a constant into an
12343 RROTATE_EXPR by a new constant. */
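/* Rotating left by C is the same as rotating right by PREC - C,
   so later code only has to handle one canonical rotate direction. */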
12344 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12346 tree tem = build_int_cst (TREE_TYPE (arg1),
12347 TYPE_PRECISION (type));
12348 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12349 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12352 /* If we have a rotate of a bit operation with the rotate count and
12353 the second operand of the bit operation both constant,
12354 permute the two operations. */
12355 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12356 && (TREE_CODE (arg0) == BIT_AND_EXPR
12357 || TREE_CODE (arg0) == BIT_IOR_EXPR
12358 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12360 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12361 fold_build2_loc (loc, code, type,
12362 TREE_OPERAND (arg0, 0), arg1),
12363 fold_build2_loc (loc, code, type,
12364 TREE_OPERAND (arg0, 1), arg1));
12366 /* Two consecutive rotates adding up to the precision of the
12367 type can be ignored. */
12368 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12369 && TREE_CODE (arg0) == RROTATE_EXPR
12370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12371 && TREE_INT_CST_HIGH (arg1) == 0
12372 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12373 && ((TREE_INT_CST_LOW (arg1)
12374 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12375 == (unsigned int) TYPE_PRECISION (type)))
12376 return TREE_OPERAND (arg0, 0);
12378 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12379 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12380 if the latter can be further optimized. */
12381 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12382 && TREE_CODE (arg0) == BIT_AND_EXPR
12383 && TREE_CODE (arg1) == INTEGER_CST
12384 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12386 tree mask = fold_build2_loc (loc, code, type,
12387 fold_convert_loc (loc, type,
12388 TREE_OPERAND (arg0, 1)),
12389 arg1);
12390 tree shift = fold_build2_loc (loc, code, type,
12391 fold_convert_loc (loc, type,
12392 TREE_OPERAND (arg0, 0)),
12393 arg1);
12394 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12395 if (tem)
12396 return tem;
12399 return NULL_TREE;
12401 case MIN_EXPR:
12402 if (operand_equal_p (arg0, arg1, 0))
12403 return omit_one_operand_loc (loc, type, arg0, arg1);
12404 if (INTEGRAL_TYPE_P (type)
12405 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12406 return omit_one_operand_loc (loc, type, arg1, arg0);
12407 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12408 if (tem)
12409 return tem;
12410 goto associate;
12412 case MAX_EXPR:
12413 if (operand_equal_p (arg0, arg1, 0))
12414 return omit_one_operand_loc (loc, type, arg0, arg1);
12415 if (INTEGRAL_TYPE_P (type)
12416 && TYPE_MAX_VALUE (type)
12417 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12418 return omit_one_operand_loc (loc, type, arg1, arg0);
12419 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12420 if (tem)
12421 return tem;
12422 goto associate;
12424 case TRUTH_ANDIF_EXPR:
12425 /* Note that the operands of this must be ints
12426 and their values must be 0 or 1.
12427 ("true" is a fixed value perhaps depending on the language.) */
12428 /* If first arg is constant zero, return it. */
12429 if (integer_zerop (arg0))
12430 return fold_convert_loc (loc, type, arg0);
12431 case TRUTH_AND_EXPR:
12432 /* If either arg is constant true, drop it. */
12433 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12434 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12435 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12436 /* Preserve sequence points. */
12437 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12438 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12439 /* If second arg is constant zero, result is zero, but first arg
12440 must be evaluated. */
12441 if (integer_zerop (arg1))
12442 return omit_one_operand_loc (loc, type, arg1, arg0);
12443 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12444 case will be handled here. */
12445 if (integer_zerop (arg0))
12446 return omit_one_operand_loc (loc, type, arg0, arg1);
12448 /* !X && X is always false. */
12449 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12450 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12451 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12452 /* X && !X is always false. */
12453 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12454 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12455 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12457 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12458 means A >= Y && A != MAX, but in this case we know that
12459 A < X <= MAX. */
12461 if (!TREE_SIDE_EFFECTS (arg0)
12462 && !TREE_SIDE_EFFECTS (arg1))
12464 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12465 if (tem && !operand_equal_p (tem, arg0, 0))
12466 return fold_build2_loc (loc, code, type, tem, arg1);
12468 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12469 if (tem && !operand_equal_p (tem, arg1, 0))
12470 return fold_build2_loc (loc, code, type, arg0, tem);
12473 truth_andor:
12474 /* We only do these simplifications if we are optimizing. */
12475 if (!optimize)
12476 return NULL_TREE;
12478 /* Check for things like (A || B) && (A || C). We can convert this
12479 to A || (B && C). Note that either operator can be any of the four
12480 truth and/or operations and the transformation will still be
12481 valid. Also note that we only care about order for the
12482 ANDIF and ORIF operators. If B contains side effects, this
12483 might change the truth-value of A. */
12484 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12485 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12486 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12487 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12488 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12489 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12491 tree a00 = TREE_OPERAND (arg0, 0);
12492 tree a01 = TREE_OPERAND (arg0, 1);
12493 tree a10 = TREE_OPERAND (arg1, 0);
12494 tree a11 = TREE_OPERAND (arg1, 1);
12495 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12496 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12497 && (code == TRUTH_AND_EXPR
12498 || code == TRUTH_OR_EXPR));
12500 if (operand_equal_p (a00, a10, 0))
12501 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12502 fold_build2_loc (loc, code, type, a01, a11));
12503 else if (commutative && operand_equal_p (a00, a11, 0))
12504 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12505 fold_build2_loc (loc, code, type, a01, a10));
12506 else if (commutative && operand_equal_p (a01, a10, 0))
12507 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12508 fold_build2_loc (loc, code, type, a00, a11));
12510	      /* This case is tricky because we must either have commutative
12511 operators or else A10 must not have side-effects. */
12513 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12514 && operand_equal_p (a01, a11, 0))
12515 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12516 fold_build2_loc (loc, code, type, a00, a10),
12517 a01);
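	  /* Illustrative example: (a || b) && (a || c) becomes
	     a || (b && c); with the commutative TRUTH_AND/TRUTH_OR forms,
	     (a OR b) AND (c OR a) likewise becomes a OR (b AND c). */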
12520 /* See if we can build a range comparison. */
12521 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12522 return tem;
12524 /* Check for the possibility of merging component references. If our
12525 lhs is another similar operation, try to merge its rhs with our
12526 rhs. Then try to merge our lhs and rhs. */
12527 if (TREE_CODE (arg0) == code
12528 && 0 != (tem = fold_truthop (loc, code, type,
12529 TREE_OPERAND (arg0, 1), arg1)))
12530 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12532 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12533 return tem;
12535 return NULL_TREE;
12537 case TRUTH_ORIF_EXPR:
12538 /* Note that the operands of this must be ints
12539 and their values must be 0 or true.
12540 ("true" is a fixed value perhaps depending on the language.) */
12541 /* If first arg is constant true, return it. */
12542 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12543 return fold_convert_loc (loc, type, arg0);
12544 case TRUTH_OR_EXPR:
12545 /* If either arg is constant zero, drop it. */
12546 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12547 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12548 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12549 /* Preserve sequence points. */
12550 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12551 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12552 /* If second arg is constant true, result is true, but we must
12553 evaluate first arg. */
12554 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12555 return omit_one_operand_loc (loc, type, arg1, arg0);
12556 /* Likewise for first arg, but note this only occurs here for
12557 TRUTH_OR_EXPR. */
12558 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12559 return omit_one_operand_loc (loc, type, arg0, arg1);
12561 /* !X || X is always true. */
12562 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12563 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12564 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12565 /* X || !X is always true. */
12566 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12567 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12568 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12570 goto truth_andor;
12572 case TRUTH_XOR_EXPR:
12573 /* If the second arg is constant zero, drop it. */
12574 if (integer_zerop (arg1))
12575 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12576 /* If the second arg is constant true, this is a logical inversion. */
12577 if (integer_onep (arg1))
12579 /* Only call invert_truthvalue if operand is a truth value. */
12580 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12581 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12582 else
12583 tem = invert_truthvalue_loc (loc, arg0);
12584 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12586 /* Identical arguments cancel to zero. */
12587 if (operand_equal_p (arg0, arg1, 0))
12588 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12590 /* !X ^ X is always true. */
12591 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12592 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12593 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12595 /* X ^ !X is always true. */
12596 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12597 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12598 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12600 return NULL_TREE;
12602 case EQ_EXPR:
12603 case NE_EXPR:
12604 tem = fold_comparison (loc, code, type, op0, op1);
12605 if (tem != NULL_TREE)
12606 return tem;
12608 /* bool_var != 0 becomes bool_var. */
12609 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12610 && code == NE_EXPR)
12611 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12613 /* bool_var == 1 becomes bool_var. */
12614 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12615 && code == EQ_EXPR)
12616 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12618 /* bool_var != 1 becomes !bool_var. */
12619 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12620 && code == NE_EXPR)
12621 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12622 fold_convert_loc (loc, type, arg0));
12624 /* bool_var == 0 becomes !bool_var. */
12625 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12626 && code == EQ_EXPR)
12627 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12628 fold_convert_loc (loc, type, arg0));
12631      /* !exp != 0 becomes !exp.  */
12631 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12632 && code == NE_EXPR)
12633 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12635 /* If this is an equality comparison of the address of two non-weak,
12636	 unaliased symbols neither of which is extern (since we do not
12637 have access to attributes for externs), then we know the result. */
12638 if (TREE_CODE (arg0) == ADDR_EXPR
12639 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12640 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12641 && ! lookup_attribute ("alias",
12642 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12643 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12644 && TREE_CODE (arg1) == ADDR_EXPR
12645 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12646 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12647 && ! lookup_attribute ("alias",
12648 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12649 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12651 /* We know that we're looking at the address of two
12652 non-weak, unaliased, static _DECL nodes.
12654 It is both wasteful and incorrect to call operand_equal_p
12655 to compare the two ADDR_EXPR nodes. It is wasteful in that
12656 all we need to do is test pointer equality for the arguments
12657 to the two ADDR_EXPR nodes. It is incorrect to use
12658 operand_equal_p as that function is NOT equivalent to a
12659 C equality test. It can in fact return false for two
12660 objects which would test as equal using the C equality
12661 operator. */
12662 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12663 return constant_boolean_node (equal
12664 ? code == EQ_EXPR : code != EQ_EXPR,
12665 type);
12668 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12669 a MINUS_EXPR of a constant, we can convert it into a comparison with
12670 a revised constant as long as no overflow occurs. */
12671 if (TREE_CODE (arg1) == INTEGER_CST
12672 && (TREE_CODE (arg0) == PLUS_EXPR
12673 || TREE_CODE (arg0) == MINUS_EXPR)
12674 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12675 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12676 ? MINUS_EXPR : PLUS_EXPR,
12677 fold_convert_loc (loc, TREE_TYPE (arg0),
12678 arg1),
12679 TREE_OPERAND (arg0, 1), 0))
12680 && !TREE_OVERFLOW (tem))
12681 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12683 /* Similarly for a NEGATE_EXPR. */
12684 if (TREE_CODE (arg0) == NEGATE_EXPR
12685 && TREE_CODE (arg1) == INTEGER_CST
12686 && 0 != (tem = negate_expr (arg1))
12687 && TREE_CODE (tem) == INTEGER_CST
12688 && !TREE_OVERFLOW (tem))
12689 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12691 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12692 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12693 && TREE_CODE (arg1) == INTEGER_CST
12694 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12695 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12696 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12697 fold_convert_loc (loc,
12698 TREE_TYPE (arg0),
12699 arg1),
12700 TREE_OPERAND (arg0, 1)));
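      /* Illustrative example: (x ^ 5) == 3 becomes x == 6, since XORing
	 both sides by 5 cancels the constant and 5 ^ 3 == 6. */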
12702 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12703 if ((TREE_CODE (arg0) == PLUS_EXPR
12704 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12705 || TREE_CODE (arg0) == MINUS_EXPR)
12706 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12707 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12708 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12710 tree val = TREE_OPERAND (arg0, 1);
12711 return omit_two_operands_loc (loc, type,
12712 fold_build2_loc (loc, code, type,
12713 val,
12714 build_int_cst (TREE_TYPE (val),
12715 0)),
12716 TREE_OPERAND (arg0, 0), arg1);
12719 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12720 if (TREE_CODE (arg0) == MINUS_EXPR
12721 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12722 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12723 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12725 return omit_two_operands_loc (loc, type,
12726 code == NE_EXPR
12727 ? boolean_true_node : boolean_false_node,
12728 TREE_OPERAND (arg0, 1), arg1);
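      /* Illustrative example: (7 - x) == x would need 2*x == 7, which
	 has no solution modulo 2^n because 2*x is always even and 7 is
	 odd; so EQ folds to false and NE folds to true. */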
12731 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12732 for !=. Don't do this for ordered comparisons due to overflow. */
12733 if (TREE_CODE (arg0) == MINUS_EXPR
12734 && integer_zerop (arg1))
12735 return fold_build2_loc (loc, code, type,
12736 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12738 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12739 if (TREE_CODE (arg0) == ABS_EXPR
12740 && (integer_zerop (arg1) || real_zerop (arg1)))
12741 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12743 /* If this is an EQ or NE comparison with zero and ARG0 is
12744 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12745 two operations, but the latter can be done in one less insn
12746 on machines that have only two-operand insns or on which a
12747 constant cannot be the first operand. */
12748 if (TREE_CODE (arg0) == BIT_AND_EXPR
12749 && integer_zerop (arg1))
12751 tree arg00 = TREE_OPERAND (arg0, 0);
12752 tree arg01 = TREE_OPERAND (arg0, 1);
12753 if (TREE_CODE (arg00) == LSHIFT_EXPR
12754 && integer_onep (TREE_OPERAND (arg00, 0)))
12756 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12757 arg01, TREE_OPERAND (arg00, 1));
12758 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12759 build_int_cst (TREE_TYPE (arg0), 1));
12760 return fold_build2_loc (loc, code, type,
12761 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12762 arg1);
12764 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12765 && integer_onep (TREE_OPERAND (arg01, 0)))
12767 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12768 arg00, TREE_OPERAND (arg01, 1));
12769 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12770 build_int_cst (TREE_TYPE (arg0), 1));
12771 return fold_build2_loc (loc, code, type,
12772 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12773 arg1);
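	  /* Illustrative example: ((1 << n) & flags) != 0 becomes
	     ((flags >> n) & 1) != 0, which takes one less insn on
	     two-operand machines. */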
12777 /* If this is an NE or EQ comparison of zero against the result of a
12778 signed MOD operation whose second operand is a power of 2, make
12779 the MOD operation unsigned since it is simpler and equivalent. */
12780 if (integer_zerop (arg1)
12781 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12782 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12783 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12784 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12785 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12786 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12788 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12789 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12790 fold_convert_loc (loc, newtype,
12791 TREE_OPERAND (arg0, 0)),
12792 fold_convert_loc (loc, newtype,
12793 TREE_OPERAND (arg0, 1)));
12795 return fold_build2_loc (loc, code, type, newmod,
12796 fold_convert_loc (loc, newtype, arg1));
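	  /* Illustrative example, assuming two's complement: for signed
	     x, x % 4 == 0 becomes (unsigned) x % 4U == 0; divisibility
	     by a power of two is unaffected by the reinterpretation,
	     and the unsigned MOD reduces to a simple mask. */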
12799 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12800 C1 is a valid shift constant, and C2 is a power of two, i.e.
12801 a single bit. */
12802 if (TREE_CODE (arg0) == BIT_AND_EXPR
12803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12804 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12805 == INTEGER_CST
12806 && integer_pow2p (TREE_OPERAND (arg0, 1))
12807 && integer_zerop (arg1))
12809 tree itype = TREE_TYPE (arg0);
12810 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12811 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12813 /* Check for a valid shift count. */
12814 if (TREE_INT_CST_HIGH (arg001) == 0
12815 && TREE_INT_CST_LOW (arg001) < prec)
12817 tree arg01 = TREE_OPERAND (arg0, 1);
12818 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12819 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12820 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12821 can be rewritten as (X & (C2 << C1)) != 0. */
12822 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12824 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12825 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12826 return fold_build2_loc (loc, code, type, tem, arg1);
12828 /* Otherwise, for signed (arithmetic) shifts,
12829 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12830 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12831 else if (!TYPE_UNSIGNED (itype))
12832 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12833 arg000, build_int_cst (itype, 0));
12834	      /* Otherwise, for unsigned (logical) shifts,
12835 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12836 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12837 else
12838 return omit_one_operand_loc (loc, type,
12839 code == EQ_EXPR ? integer_one_node
12840 : integer_zero_node,
12841 arg000);
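	  /* Illustrative examples, assuming a 32-bit int x:
	     ((x >> 3) & 4) != 0 becomes (x & 32) != 0, since 4 << 3
	     does not overflow; ((x >> 31) & 2) != 0 becomes x < 0 for
	     signed x, because the arithmetic shift yields 0 or -1. */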
12845 /* If this is an NE comparison of zero with an AND of one, remove the
12846 comparison since the AND will give the correct value. */
12847 if (code == NE_EXPR
12848 && integer_zerop (arg1)
12849 && TREE_CODE (arg0) == BIT_AND_EXPR
12850 && integer_onep (TREE_OPERAND (arg0, 1)))
12851 return fold_convert_loc (loc, type, arg0);
12853 /* If we have (A & C) == C where C is a power of 2, convert this into
12854 (A & C) != 0. Similarly for NE_EXPR. */
12855 if (TREE_CODE (arg0) == BIT_AND_EXPR
12856 && integer_pow2p (TREE_OPERAND (arg0, 1))
12857 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12858 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12859 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12860 integer_zero_node));
12862 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12863 bit, then fold the expression into A < 0 or A >= 0. */
12864 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12865 if (tem)
12866 return tem;
12868 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12869 Similarly for NE_EXPR. */
12870 if (TREE_CODE (arg0) == BIT_AND_EXPR
12871 && TREE_CODE (arg1) == INTEGER_CST
12872 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12874 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12875 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12876 TREE_OPERAND (arg0, 1));
12877 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12878 arg1, notc);
12879 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12880 if (integer_nonzerop (dandnotc))
12881 return omit_one_operand_loc (loc, type, rslt, arg0);
12884 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12885 Similarly for NE_EXPR. */
12886 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12887 && TREE_CODE (arg1) == INTEGER_CST
12888 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12890 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12891 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12892 TREE_OPERAND (arg0, 1), notd);
12893 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12894 if (integer_nonzerop (candnotd))
12895 return omit_one_operand_loc (loc, type, rslt, arg0);
12898 /* If this is a comparison of a field, we may be able to simplify it. */
12899 if ((TREE_CODE (arg0) == COMPONENT_REF
12900 || TREE_CODE (arg0) == BIT_FIELD_REF)
12901 /* Handle the constant case even without -O
12902 to make sure the warnings are given. */
12903 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12905 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12906 if (t1)
12907 return t1;
12910 /* Optimize comparisons of strlen vs zero to a compare of the
12911 first character of the string vs zero. To wit,
12912 strlen(ptr) == 0 => *ptr == 0
12913 strlen(ptr) != 0 => *ptr != 0
12914 Other cases should reduce to one of these two (or a constant)
12915 due to the return value of strlen being unsigned. */
12916 if (TREE_CODE (arg0) == CALL_EXPR
12917 && integer_zerop (arg1))
12919 tree fndecl = get_callee_fndecl (arg0);
12921 if (fndecl
12922 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12923 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12924 && call_expr_nargs (arg0) == 1
12925 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12927 tree iref = build_fold_indirect_ref_loc (loc,
12928 CALL_EXPR_ARG (arg0, 0));
12929 return fold_build2_loc (loc, code, type, iref,
12930 build_int_cst (TREE_TYPE (iref), 0));
12934 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12935 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12936 if (TREE_CODE (arg0) == RSHIFT_EXPR
12937 && integer_zerop (arg1)
12938 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12940 tree arg00 = TREE_OPERAND (arg0, 0);
12941 tree arg01 = TREE_OPERAND (arg0, 1);
12942 tree itype = TREE_TYPE (arg00);
12943 if (TREE_INT_CST_HIGH (arg01) == 0
12944 && TREE_INT_CST_LOW (arg01)
12945 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12947 if (TYPE_UNSIGNED (itype))
12949 itype = signed_type_for (itype);
12950 arg00 = fold_convert_loc (loc, itype, arg00);
12952 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12953 type, arg00, build_int_cst (itype, 0));
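	  /* Illustrative example, assuming a 32-bit x: (x >> 31) != 0
	     becomes x < 0 and (x >> 31) == 0 becomes x >= 0, converting
	     an unsigned x to the corresponding signed type first. */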
12957 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12958 if (integer_zerop (arg1)
12959 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12960 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12961 TREE_OPERAND (arg0, 1));
12963 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12964 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12965 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12966 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12967 build_int_cst (TREE_TYPE (arg1), 0));
12968 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12969 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12970 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12971 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12972 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12973 build_int_cst (TREE_TYPE (arg1), 0));
12975 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12976 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12977 && TREE_CODE (arg1) == INTEGER_CST
12978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12979 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12980 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12981 TREE_OPERAND (arg0, 1), arg1));
12983 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12984 (X & C) == 0 when C is a single bit. */
12985 if (TREE_CODE (arg0) == BIT_AND_EXPR
12986 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12987 && integer_zerop (arg1)
12988 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12990 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12991 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12992 TREE_OPERAND (arg0, 1));
12993 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12994 type, tem, arg1);
12997 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12998 constant C is a power of two, i.e. a single bit. */
12999 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13000 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13001 && integer_zerop (arg1)
13002 && integer_pow2p (TREE_OPERAND (arg0, 1))
13003 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13004 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13006 tree arg00 = TREE_OPERAND (arg0, 0);
13007 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13008 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13011 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13012	 when C is a power of two, i.e. a single bit. */
13013 if (TREE_CODE (arg0) == BIT_AND_EXPR
13014 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13015 && integer_zerop (arg1)
13016 && integer_pow2p (TREE_OPERAND (arg0, 1))
13017 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13018 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13020 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13021 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13022 arg000, TREE_OPERAND (arg0, 1));
13023 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13024 tem, build_int_cst (TREE_TYPE (tem), 0));
13027 if (integer_zerop (arg1)
13028 && tree_expr_nonzero_p (arg0))
13030	  tree res = constant_boolean_node (code == NE_EXPR, type);
13031 return omit_one_operand_loc (loc, type, res, arg0);
13034 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13035 if (TREE_CODE (arg0) == NEGATE_EXPR
13036 && TREE_CODE (arg1) == NEGATE_EXPR)
13037 return fold_build2_loc (loc, code, type,
13038 TREE_OPERAND (arg0, 0),
13039 TREE_OPERAND (arg1, 0));
13041      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
13042 if (TREE_CODE (arg0) == BIT_AND_EXPR
13043 && TREE_CODE (arg1) == BIT_AND_EXPR)
13045 tree arg00 = TREE_OPERAND (arg0, 0);
13046 tree arg01 = TREE_OPERAND (arg0, 1);
13047 tree arg10 = TREE_OPERAND (arg1, 0);
13048 tree arg11 = TREE_OPERAND (arg1, 1);
13049 tree itype = TREE_TYPE (arg0);
13051 if (operand_equal_p (arg01, arg11, 0))
13052 return fold_build2_loc (loc, code, type,
13053 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13054 fold_build2_loc (loc,
13055 BIT_XOR_EXPR, itype,
13056 arg00, arg10),
13057 arg01),
13058 build_int_cst (itype, 0));
13060 if (operand_equal_p (arg01, arg10, 0))
13061 return fold_build2_loc (loc, code, type,
13062 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13063 fold_build2_loc (loc,
13064 BIT_XOR_EXPR, itype,
13065 arg00, arg11),
13066 arg01),
13067 build_int_cst (itype, 0));
13069 if (operand_equal_p (arg00, arg11, 0))
13070 return fold_build2_loc (loc, code, type,
13071 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13072 fold_build2_loc (loc,
13073 BIT_XOR_EXPR, itype,
13074 arg01, arg10),
13075 arg00),
13076 build_int_cst (itype, 0));
13078 if (operand_equal_p (arg00, arg10, 0))
13079 return fold_build2_loc (loc, code, type,
13080 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13081 fold_build2_loc (loc,
13082 BIT_XOR_EXPR, itype,
13083 arg01, arg11),
13084 arg00),
13085 build_int_cst (itype, 0));
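	  /* Illustrative example: (x & 7) == (y & 7) becomes
	     ((x ^ y) & 7) == 0, replacing two masks by one mask of
	     the XOR. */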
13088 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13089 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13091 tree arg00 = TREE_OPERAND (arg0, 0);
13092 tree arg01 = TREE_OPERAND (arg0, 1);
13093 tree arg10 = TREE_OPERAND (arg1, 0);
13094 tree arg11 = TREE_OPERAND (arg1, 1);
13095 tree itype = TREE_TYPE (arg0);
13097 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13098 operand_equal_p guarantees no side-effects so we don't need
13099 to use omit_one_operand on Z. */
13100 if (operand_equal_p (arg01, arg11, 0))
13101 return fold_build2_loc (loc, code, type, arg00, arg10);
13102 if (operand_equal_p (arg01, arg10, 0))
13103 return fold_build2_loc (loc, code, type, arg00, arg11);
13104 if (operand_equal_p (arg00, arg11, 0))
13105 return fold_build2_loc (loc, code, type, arg01, arg10);
13106 if (operand_equal_p (arg00, arg10, 0))
13107 return fold_build2_loc (loc, code, type, arg01, arg11);
13109 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13110 if (TREE_CODE (arg01) == INTEGER_CST
13111 && TREE_CODE (arg11) == INTEGER_CST)
13112 return fold_build2_loc (loc, code, type,
13113 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13114 fold_build2_loc (loc,
13115 BIT_XOR_EXPR, itype,
13116 arg01, arg11)),
13117 arg10);
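	  /* Illustrative example: (x ^ 1) == (y ^ 2) becomes
	     (x ^ 3) == y, merging the constants as 1 ^ 2 == 3. */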
13120 /* Attempt to simplify equality/inequality comparisons of complex
13121 values. Only lower the comparison if the result is known or
13122 can be simplified to a single scalar comparison. */
13123 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13124 || TREE_CODE (arg0) == COMPLEX_CST)
13125 && (TREE_CODE (arg1) == COMPLEX_EXPR
13126 || TREE_CODE (arg1) == COMPLEX_CST))
13128 tree real0, imag0, real1, imag1;
13129 tree rcond, icond;
13131 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13133 real0 = TREE_OPERAND (arg0, 0);
13134 imag0 = TREE_OPERAND (arg0, 1);
13136 else
13138 real0 = TREE_REALPART (arg0);
13139 imag0 = TREE_IMAGPART (arg0);
13142 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13144 real1 = TREE_OPERAND (arg1, 0);
13145 imag1 = TREE_OPERAND (arg1, 1);
13147 else
13149 real1 = TREE_REALPART (arg1);
13150 imag1 = TREE_IMAGPART (arg1);
13153 rcond = fold_binary_loc (loc, code, type, real0, real1);
13154 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13156 if (integer_zerop (rcond))
13158 if (code == EQ_EXPR)
13159 return omit_two_operands_loc (loc, type, boolean_false_node,
13160 imag0, imag1);
13161 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13163 else
13165 if (code == NE_EXPR)
13166 return omit_two_operands_loc (loc, type, boolean_true_node,
13167 imag0, imag1);
13168 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13172 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13173 if (icond && TREE_CODE (icond) == INTEGER_CST)
13175 if (integer_zerop (icond))
13177 if (code == EQ_EXPR)
13178 return omit_two_operands_loc (loc, type, boolean_false_node,
13179 real0, real1);
13180 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13182 else
13184 if (code == NE_EXPR)
13185 return omit_two_operands_loc (loc, type, boolean_true_node,
13186 real0, real1);
13187 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13192 return NULL_TREE;
13194 case LT_EXPR:
13195 case GT_EXPR:
13196 case LE_EXPR:
13197 case GE_EXPR:
13198 tem = fold_comparison (loc, code, type, op0, op1);
13199 if (tem != NULL_TREE)
13200 return tem;
13202 /* Transform comparisons of the form X +- C CMP X. */
13203 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13204 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13205 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13206 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13207 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13208 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13210 tree arg01 = TREE_OPERAND (arg0, 1);
13211 enum tree_code code0 = TREE_CODE (arg0);
13212 int is_positive;
13214 if (TREE_CODE (arg01) == REAL_CST)
13215 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13216 else
13217 is_positive = tree_int_cst_sgn (arg01);
13219 /* (X - c) > X becomes false. */
13220 if (code == GT_EXPR
13221 && ((code0 == MINUS_EXPR && is_positive >= 0)
13222 || (code0 == PLUS_EXPR && is_positive <= 0)))
13224 if (TREE_CODE (arg01) == INTEGER_CST
13225 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13226 fold_overflow_warning (("assuming signed overflow does not "
13227 "occur when assuming that (X - c) > X "
13228 "is always false"),
13229 WARN_STRICT_OVERFLOW_ALL);
13230 return constant_boolean_node (0, type);
13233 /* Likewise (X + c) < X becomes false. */
13234 if (code == LT_EXPR
13235 && ((code0 == PLUS_EXPR && is_positive >= 0)
13236 || (code0 == MINUS_EXPR && is_positive <= 0)))
13238 if (TREE_CODE (arg01) == INTEGER_CST
13239 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13240 fold_overflow_warning (("assuming signed overflow does not "
13241 "occur when assuming that "
13242 "(X + c) < X is always false"),
13243 WARN_STRICT_OVERFLOW_ALL);
13244 return constant_boolean_node (0, type);
13247 /* Convert (X - c) <= X to true. */
13248 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13249 && code == LE_EXPR
13250 && ((code0 == MINUS_EXPR && is_positive >= 0)
13251 || (code0 == PLUS_EXPR && is_positive <= 0)))
13253 if (TREE_CODE (arg01) == INTEGER_CST
13254 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13255 fold_overflow_warning (("assuming signed overflow does not "
13256 "occur when assuming that "
13257 "(X - c) <= X is always true"),
13258 WARN_STRICT_OVERFLOW_ALL);
13259 return constant_boolean_node (1, type);
13262 /* Convert (X + c) >= X to true. */
13263 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13264 && code == GE_EXPR
13265 && ((code0 == PLUS_EXPR && is_positive >= 0)
13266 || (code0 == MINUS_EXPR && is_positive <= 0)))
13268 if (TREE_CODE (arg01) == INTEGER_CST
13269 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13270 fold_overflow_warning (("assuming signed overflow does not "
13271 "occur when assuming that "
13272 "(X + c) >= X is always true"),
13273 WARN_STRICT_OVERFLOW_ALL);
13274 return constant_boolean_node (1, type);
13277 if (TREE_CODE (arg01) == INTEGER_CST)
13279 /* Convert X + c > X and X - c < X to true for integers. */
13280 if (code == GT_EXPR
13281 && ((code0 == PLUS_EXPR && is_positive > 0)
13282 || (code0 == MINUS_EXPR && is_positive < 0)))
13284 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13285 fold_overflow_warning (("assuming signed overflow does "
13286 "not occur when assuming that "
13287 "(X + c) > X is always true"),
13288 WARN_STRICT_OVERFLOW_ALL);
13289 return constant_boolean_node (1, type);
13292 if (code == LT_EXPR
13293 && ((code0 == MINUS_EXPR && is_positive > 0)
13294 || (code0 == PLUS_EXPR && is_positive < 0)))
13296 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13297 fold_overflow_warning (("assuming signed overflow does "
13298 "not occur when assuming that "
13299 "(X - c) < X is always true"),
13300 WARN_STRICT_OVERFLOW_ALL);
13301 return constant_boolean_node (1, type);
13304 /* Convert X + c <= X and X - c >= X to false for integers. */
13305 if (code == LE_EXPR
13306 && ((code0 == PLUS_EXPR && is_positive > 0)
13307 || (code0 == MINUS_EXPR && is_positive < 0)))
13309 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13310 fold_overflow_warning (("assuming signed overflow does "
13311 "not occur when assuming that "
13312 "(X + c) <= X is always false"),
13313 WARN_STRICT_OVERFLOW_ALL);
13314 return constant_boolean_node (0, type);
13317 if (code == GE_EXPR
13318 && ((code0 == MINUS_EXPR && is_positive > 0)
13319 || (code0 == PLUS_EXPR && is_positive < 0)))
13321 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13322 fold_overflow_warning (("assuming signed overflow does "
13323 "not occur when assuming that "
13324 "(X - c) >= X is always false"),
13325 WARN_STRICT_OVERFLOW_ALL);
13326 return constant_boolean_node (0, type);
13331 /* Comparisons with the highest or lowest possible integer of
13332 the specified precision will have known values. */
13334 tree arg1_type = TREE_TYPE (arg1);
13335 unsigned int width = TYPE_PRECISION (arg1_type);
13337 if (TREE_CODE (arg1) == INTEGER_CST
13338 && width <= 2 * HOST_BITS_PER_WIDE_INT
13339 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13341 HOST_WIDE_INT signed_max_hi;
13342 unsigned HOST_WIDE_INT signed_max_lo;
13343 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13345 if (width <= HOST_BITS_PER_WIDE_INT)
13347 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13348 - 1;
13349 signed_max_hi = 0;
13350 max_hi = 0;
13352 if (TYPE_UNSIGNED (arg1_type))
13354 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13355 min_lo = 0;
13356 min_hi = 0;
13358 else
13360 max_lo = signed_max_lo;
13361 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13362 min_hi = -1;
13365 else
13367 width -= HOST_BITS_PER_WIDE_INT;
13368 signed_max_lo = -1;
13369 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13370 - 1;
13371 max_lo = -1;
13372 min_lo = 0;
13374 if (TYPE_UNSIGNED (arg1_type))
13376 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13377 min_hi = 0;
13379 else
13381 max_hi = signed_max_hi;
13382 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13386 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13387 && TREE_INT_CST_LOW (arg1) == max_lo)
13388 switch (code)
13390 case GT_EXPR:
13391 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13393 case GE_EXPR:
13394 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13396 case LE_EXPR:
13397 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13399 case LT_EXPR:
13400 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13402 /* The GE_EXPR and LT_EXPR cases above are not normally
13403 reached because of previous transformations. */
13405 default:
13406 break;
13408 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13409 == max_hi
13410 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13411 switch (code)
13413 case GT_EXPR:
13414 arg1 = const_binop (PLUS_EXPR, arg1,
13415 build_int_cst (TREE_TYPE (arg1), 1), 0);
13416 return fold_build2_loc (loc, EQ_EXPR, type,
13417 fold_convert_loc (loc,
13418 TREE_TYPE (arg1), arg0),
13419 arg1);
13420 case LE_EXPR:
13421 arg1 = const_binop (PLUS_EXPR, arg1,
13422 build_int_cst (TREE_TYPE (arg1), 1), 0);
13423 return fold_build2_loc (loc, NE_EXPR, type,
13424 fold_convert_loc (loc, TREE_TYPE (arg1),
13425 arg0),
13426 arg1);
13427 default:
13428 break;
13430 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13431 == min_hi
13432 && TREE_INT_CST_LOW (arg1) == min_lo)
13433 switch (code)
13435 case LT_EXPR:
13436 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13438 case LE_EXPR:
13439 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13441 case GE_EXPR:
13442 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13444 case GT_EXPR:
13445 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13447 default:
13448 break;
13450 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13451 == min_hi
13452 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13453 switch (code)
13455 case GE_EXPR:
13456 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13457 return fold_build2_loc (loc, NE_EXPR, type,
13458 fold_convert_loc (loc,
13459 TREE_TYPE (arg1), arg0),
13460 arg1);
13461 case LT_EXPR:
13462 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13463 return fold_build2_loc (loc, EQ_EXPR, type,
13464 fold_convert_loc (loc, TREE_TYPE (arg1),
13465 arg0),
13466 arg1);
13467 default:
13468 break;
13471 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13472 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13473 && TYPE_UNSIGNED (arg1_type)
13474 /* We will flip the signedness of the comparison operator
13475 associated with the mode of arg1, so the sign bit is
13476 specified by this mode. Check that arg1 is the signed
13477 max associated with this sign bit. */
13478 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13479 /* signed_type does not work on pointer types. */
13480 && INTEGRAL_TYPE_P (arg1_type))
13482 /* The following case also applies to X < signed_max+1
13483		   and X >= signed_max+1 because of previous transformations. */
13484 if (code == LE_EXPR || code == GT_EXPR)
13486 tree st;
13487 st = signed_type_for (TREE_TYPE (arg1));
13488 return fold_build2_loc (loc,
13489 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13490 type, fold_convert_loc (loc, st, arg0),
13491 build_int_cst (st, 0));
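	      /* Illustrative example, assuming a 32-bit unsigned x:
		 x > 0x7fffffff becomes (int) x < 0 and x <= 0x7fffffff
		 becomes (int) x >= 0; the comparison reduces to a
		 sign-bit test. */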
13497 /* If we are comparing an ABS_EXPR with a constant, we can
13498 convert all the cases into explicit comparisons, but they may
13499 well not be faster than doing the ABS and one comparison.
13500 But ABS (X) <= C is a range comparison, which becomes a subtraction
13501 and a comparison, and is probably faster. */
13502 if (code == LE_EXPR
13503 && TREE_CODE (arg1) == INTEGER_CST
13504 && TREE_CODE (arg0) == ABS_EXPR
13505 && ! TREE_SIDE_EFFECTS (arg0)
13506 && (0 != (tem = negate_expr (arg1)))
13507 && TREE_CODE (tem) == INTEGER_CST
13508 && !TREE_OVERFLOW (tem))
13509 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13510 build2 (GE_EXPR, type,
13511 TREE_OPERAND (arg0, 0), tem),
13512 build2 (LE_EXPR, type,
13513 TREE_OPERAND (arg0, 0), arg1));
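      /* Illustrative example: ABS (x) <= 5 becomes x >= -5 && x <= 5,
	 which the range machinery can lower to one unsigned compare of
	 x + 5 against 10. */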
13515 /* Convert ABS_EXPR<x> >= 0 to true. */
13516 strict_overflow_p = false;
13517 if (code == GE_EXPR
13518 && (integer_zerop (arg1)
13519 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13520 && real_zerop (arg1)))
13521 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13523 if (strict_overflow_p)
13524 fold_overflow_warning (("assuming signed overflow does not occur "
13525 "when simplifying comparison of "
13526 "absolute value and zero"),
13527 WARN_STRICT_OVERFLOW_CONDITIONAL);
13528 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13531 /* Convert ABS_EXPR<x> < 0 to false. */
13532 strict_overflow_p = false;
13533 if (code == LT_EXPR
13534 && (integer_zerop (arg1) || real_zerop (arg1))
13535 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13537 if (strict_overflow_p)
13538 fold_overflow_warning (("assuming signed overflow does not occur "
13539 "when simplifying comparison of "
13540 "absolute value and zero"),
13541 WARN_STRICT_OVERFLOW_CONDITIONAL);
13542 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13545 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13546 and similarly for >= into !=. */
13547 if ((code == LT_EXPR || code == GE_EXPR)
13548 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13549 && TREE_CODE (arg1) == LSHIFT_EXPR
13550 && integer_onep (TREE_OPERAND (arg1, 0)))
13552 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13553 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13554 TREE_OPERAND (arg1, 1)),
13555 build_int_cst (TREE_TYPE (arg0), 0));
13556 goto fold_binary_exit;
13559 if ((code == LT_EXPR || code == GE_EXPR)
13560 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13561 && CONVERT_EXPR_P (arg1)
13562 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13563 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13565 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13566 fold_convert_loc (loc, TREE_TYPE (arg0),
13567 build2 (RSHIFT_EXPR,
13568 TREE_TYPE (arg0), arg0,
13569 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13570 1))),
13571 build_int_cst (TREE_TYPE (arg0), 0));
13572 goto fold_binary_exit;
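	  /* Illustrative example, unsigned x: x < (1 << y) becomes
	     (x >> y) == 0 and x >= (1 << y) becomes (x >> y) != 0,
	     sidestepping the computation of 1 << y. */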
13575 return NULL_TREE;
13577 case UNORDERED_EXPR:
13578 case ORDERED_EXPR:
13579 case UNLT_EXPR:
13580 case UNLE_EXPR:
13581 case UNGT_EXPR:
13582 case UNGE_EXPR:
13583 case UNEQ_EXPR:
13584 case LTGT_EXPR:
13585 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13587 t1 = fold_relational_const (code, type, arg0, arg1);
13588 if (t1 != NULL_TREE)
13589 return t1;
13592 /* If the first operand is NaN, the result is constant. */
13593 if (TREE_CODE (arg0) == REAL_CST
13594 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13595 && (code != LTGT_EXPR || ! flag_trapping_math))
13597 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13598 ? integer_zero_node
13599 : integer_one_node;
13600 return omit_one_operand_loc (loc, type, t1, arg1);
13603 /* If the second operand is NaN, the result is constant. */
13604 if (TREE_CODE (arg1) == REAL_CST
13605 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13606 && (code != LTGT_EXPR || ! flag_trapping_math))
13608 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13609 ? integer_zero_node
13610 : integer_one_node;
13611 return omit_one_operand_loc (loc, type, t1, arg0);
13614 /* Simplify unordered comparison of something with itself. */
13615 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13616 && operand_equal_p (arg0, arg1, 0))
13617 return constant_boolean_node (1, type);
13619 if (code == LTGT_EXPR
13620 && !flag_trapping_math
13621 && operand_equal_p (arg0, arg1, 0))
13622 return constant_boolean_node (0, type);
13624 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13626 tree targ0 = strip_float_extensions (arg0);
13627 tree targ1 = strip_float_extensions (arg1);
13628 tree newtype = TREE_TYPE (targ0);
13630 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13631 newtype = TREE_TYPE (targ1);
13633 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13634 return fold_build2_loc (loc, code, type,
13635 fold_convert_loc (loc, newtype, targ0),
13636 fold_convert_loc (loc, newtype, targ1));
13639 return NULL_TREE;
13641 case COMPOUND_EXPR:
13642 /* When pedantic, a compound expression can be neither an lvalue
13643 nor an integer constant expression. */
13644 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13645 return NULL_TREE;
13646      /* Don't let (0, 0) be a null pointer constant. */
13647 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13648 : fold_convert_loc (loc, type, arg1);
13649 return pedantic_non_lvalue_loc (loc, tem);
13651 case COMPLEX_EXPR:
13652 if ((TREE_CODE (arg0) == REAL_CST
13653 && TREE_CODE (arg1) == REAL_CST)
13654 || (TREE_CODE (arg0) == INTEGER_CST
13655 && TREE_CODE (arg1) == INTEGER_CST))
13656 return build_complex (type, arg0, arg1);
13657 return NULL_TREE;
13659 case ASSERT_EXPR:
13660 /* An ASSERT_EXPR should never be passed to fold_binary. */
13661 gcc_unreachable ();
13663 default:
13664 return NULL_TREE;
13665 } /* switch (code) */
13666 fold_binary_exit:
13667 protected_set_expr_location (tem, loc);
13668 return tem;
13671 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13672 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13673 of GOTO_EXPR. */
13675 static tree
13676 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13678 switch (TREE_CODE (*tp))
13680 case LABEL_EXPR:
13681 return *tp;
13683 case GOTO_EXPR:
13684 *walk_subtrees = 0;
13686 /* ... fall through ... */
13688 default:
13689 return NULL_TREE;
13693 /* Return whether the sub-tree ST contains a label which is accessible from
13694 outside the sub-tree. */
13696 static bool
13697 contains_label_p (tree st)
13699 return
13700    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13703 /* Fold a ternary expression of code CODE and type TYPE with operands
13704 OP0, OP1, and OP2. Return the folded expression if folding is
13705 successful. Otherwise, return NULL_TREE. */
13707 tree
13708 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13709 tree op0, tree op1, tree op2)
13711 tree tem;
13712 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13713 enum tree_code_class kind = TREE_CODE_CLASS (code);
13715 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13716 && TREE_CODE_LENGTH (code) == 3);
13718 /* Strip any conversions that don't change the mode. This is safe
13719 for every expression, except for a comparison expression because
13720 its signedness is derived from its operands. So, in the latter
13721 case, only strip conversions that don't change the signedness.
13723 Note that this is done as an internal manipulation within the
13724 constant folder, in order to find the simplest representation of
13725     the arguments so that their form can be studied. In any case,
13726 the appropriate type conversions should be put back in the tree
13727 that will get out of the constant folder. */
13728 if (op0)
13730 arg0 = op0;
13731 STRIP_NOPS (arg0);
13734 if (op1)
13736 arg1 = op1;
13737 STRIP_NOPS (arg1);
13740 switch (code)
13742 case COMPONENT_REF:
13743 if (TREE_CODE (arg0) == CONSTRUCTOR
13744 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13746 unsigned HOST_WIDE_INT idx;
13747 tree field, value;
13748 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13749 if (field == arg1)
13750 return value;
13752 return NULL_TREE;
13754 case COND_EXPR:
13755 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13756 so all simple results must be passed through pedantic_non_lvalue. */
13757 if (TREE_CODE (arg0) == INTEGER_CST)
13759 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13760 tem = integer_zerop (arg0) ? op2 : op1;
13761 /* Only optimize constant conditions when the selected branch
13762 has the same type as the COND_EXPR. This avoids optimizing
13763 away "c ? x : throw", where the throw has a void type.
13764	   Avoid throwing away the operand that contains a label. */
13765 if ((!TREE_SIDE_EFFECTS (unused_op)
13766 || !contains_label_p (unused_op))
13767 && (! VOID_TYPE_P (TREE_TYPE (tem))
13768 || VOID_TYPE_P (type)))
13769 return pedantic_non_lvalue_loc (loc, tem);
13770 return NULL_TREE;
13772 if (operand_equal_p (arg1, op2, 0))
13773 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13775 /* If we have A op B ? A : C, we may be able to convert this to a
13776 simpler expression, depending on the operation and the values
13777 of B and C. Signed zeros prevent all of these transformations,
13778 for reasons given above each one.
13780 Also try swapping the arguments and inverting the conditional. */
13781 if (COMPARISON_CLASS_P (arg0)
13782 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13783 arg1, TREE_OPERAND (arg0, 1))
13784 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13786 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13787 if (tem)
13788 return tem;
13791 if (COMPARISON_CLASS_P (arg0)
13792 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13793 op2,
13794 TREE_OPERAND (arg0, 1))
13795 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13797 tem = fold_truth_not_expr (loc, arg0);
13798 if (tem && COMPARISON_CLASS_P (tem))
13800 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13801 if (tem)
13802 return tem;
13806 /* If the second operand is simpler than the third, swap them
13807 since that produces better jump optimization results. */
13808 if (truth_value_p (TREE_CODE (arg0))
13809 && tree_swap_operands_p (op1, op2, false))
13811 /* See if this can be inverted. If it can't, possibly because
13812 it was a floating-point inequality comparison, don't do
13813 anything. */
13814 tem = fold_truth_not_expr (loc, arg0);
13815 if (tem)
13816 return fold_build3_loc (loc, code, type, tem, op2, op1);
13819 /* Convert A ? 1 : 0 to simply A. */
13820 if (integer_onep (op1)
13821 && integer_zerop (op2)
13822 /* If we try to convert OP0 to our type, the
13823 call to fold will try to move the conversion inside
13824 a COND, which will recurse. In that case, the COND_EXPR
13825 is probably the best choice, so leave it alone. */
13826 && type == TREE_TYPE (arg0))
13827 return pedantic_non_lvalue_loc (loc, arg0);
13829 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13830 over COND_EXPR in cases such as floating point comparisons. */
13831 if (integer_zerop (op1)
13832 && integer_onep (op2)
13833 && truth_value_p (TREE_CODE (arg0)))
13834 return pedantic_non_lvalue_loc (loc,
13835 fold_convert_loc (loc, type,
13836 invert_truthvalue_loc (loc,
13837 arg0)));
13839 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13840 if (TREE_CODE (arg0) == LT_EXPR
13841 && integer_zerop (TREE_OPERAND (arg0, 1))
13842 && integer_zerop (op2)
13843 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13845 /* sign_bit_p only checks ARG1 bits within A's precision.
13846 If <sign bit of A> has wider type than A, bits outside
13847 of A's precision in <sign bit of A> need to be checked.
13848	     If they are all 0, this optimization needs to be done
13849	     in unsigned A's type; if they are all 1, in signed A's type;
13850	     otherwise this can't be done. */
13851 if (TYPE_PRECISION (TREE_TYPE (tem))
13852 < TYPE_PRECISION (TREE_TYPE (arg1))
13853 && TYPE_PRECISION (TREE_TYPE (tem))
13854 < TYPE_PRECISION (type))
13856 unsigned HOST_WIDE_INT mask_lo;
13857 HOST_WIDE_INT mask_hi;
13858 int inner_width, outer_width;
13859 tree tem_type;
13861 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13862 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13863 if (outer_width > TYPE_PRECISION (type))
13864 outer_width = TYPE_PRECISION (type);
13866 if (outer_width > HOST_BITS_PER_WIDE_INT)
13868 mask_hi = ((unsigned HOST_WIDE_INT) -1
13869 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13870 mask_lo = -1;
13872 else
13874 mask_hi = 0;
13875 mask_lo = ((unsigned HOST_WIDE_INT) -1
13876 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13878 if (inner_width > HOST_BITS_PER_WIDE_INT)
13880 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13881 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13882 mask_lo = 0;
13884 else
13885 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13886 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13888 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13889 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13891 tem_type = signed_type_for (TREE_TYPE (tem));
13892 tem = fold_convert_loc (loc, tem_type, tem);
13894 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13895 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13897 tem_type = unsigned_type_for (TREE_TYPE (tem));
13898 tem = fold_convert_loc (loc, tem_type, tem);
13900 else
13901 tem = NULL;
13904 if (tem)
13905 return
13906 fold_convert_loc (loc, type,
13907 fold_build2_loc (loc, BIT_AND_EXPR,
13908 TREE_TYPE (tem), tem,
13909 fold_convert_loc (loc,
13910 TREE_TYPE (tem),
13911 arg1)));
13914 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13915 already handled above. */
13916 if (TREE_CODE (arg0) == BIT_AND_EXPR
13917 && integer_onep (TREE_OPERAND (arg0, 1))
13918 && integer_zerop (op2)
13919 && integer_pow2p (arg1))
13921 tree tem = TREE_OPERAND (arg0, 0);
13922 STRIP_NOPS (tem);
13923 if (TREE_CODE (tem) == RSHIFT_EXPR
13924 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13925 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13926 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13927 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13928 TREE_OPERAND (tem, 0), arg1);
13931 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13932 is probably obsolete because the first operand should be a
13933 truth value (that's why we have the two cases above), but let's
13934 leave it in until we can confirm this for all front-ends. */
13935 if (integer_zerop (op2)
13936 && TREE_CODE (arg0) == NE_EXPR
13937 && integer_zerop (TREE_OPERAND (arg0, 1))
13938 && integer_pow2p (arg1)
13939 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13940 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13941 arg1, OEP_ONLY_CONST))
13942 return pedantic_non_lvalue_loc (loc,
13943 fold_convert_loc (loc, type,
13944 TREE_OPERAND (arg0, 0)));
13946 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13947 if (integer_zerop (op2)
13948 && truth_value_p (TREE_CODE (arg0))
13949 && truth_value_p (TREE_CODE (arg1)))
13950 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13951 fold_convert_loc (loc, type, arg0),
13952 arg1);
13954 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13955 if (integer_onep (op2)
13956 && truth_value_p (TREE_CODE (arg0))
13957 && truth_value_p (TREE_CODE (arg1)))
13959 /* Only perform transformation if ARG0 is easily inverted. */
13960 tem = fold_truth_not_expr (loc, arg0);
13961 if (tem)
13962 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13963 fold_convert_loc (loc, type, tem),
13964 arg1);
13967 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13968 if (integer_zerop (arg1)
13969 && truth_value_p (TREE_CODE (arg0))
13970 && truth_value_p (TREE_CODE (op2)))
13972 /* Only perform transformation if ARG0 is easily inverted. */
13973 tem = fold_truth_not_expr (loc, arg0);
13974 if (tem)
13975 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13976 fold_convert_loc (loc, type, tem),
13977 op2);
13980 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13981 if (integer_onep (arg1)
13982 && truth_value_p (TREE_CODE (arg0))
13983 && truth_value_p (TREE_CODE (op2)))
13984 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13985 fold_convert_loc (loc, type, arg0),
13986 op2);
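/* Taken together, the four transformations above canonicalize boolean
   selects into short-circuit operators.  A C-level summary (an
   editorial sketch, not part of the original source), where p and q
   are truth values:

       p ? q : 0   =>   p && q
       p ? q : 1   =>  !p || q
       p ? 0 : q   =>  !p && q
       p ? 1 : q   =>   p || q
*/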
13988 return NULL_TREE;
13990 case CALL_EXPR:
13991 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13992 of fold_ternary on them. */
13993 gcc_unreachable ();
13995 case BIT_FIELD_REF:
13996 if ((TREE_CODE (arg0) == VECTOR_CST
13997 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13998 && type == TREE_TYPE (TREE_TYPE (arg0)))
14000 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
14001 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14003 if (width != 0
14004 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
14005 && (idx % width) == 0
14006 && (idx = idx / width)
14007 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14009 tree elements = NULL_TREE;
14011 if (TREE_CODE (arg0) == VECTOR_CST)
14012 elements = TREE_VECTOR_CST_ELTS (arg0);
14013 else
14015 unsigned HOST_WIDE_INT idx;
14016 tree value;
14018 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
14019 elements = tree_cons (NULL_TREE, value, elements);
14021 while (idx-- > 0 && elements)
14022 elements = TREE_CHAIN (elements);
14023 if (elements)
14024 return TREE_VALUE (elements);
14025 else
14026 return fold_convert_loc (loc, type, integer_zero_node);
14030 /* A bit-field-ref that referenced the full argument can be stripped. */
14031 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14032 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14033 && integer_zerop (op2))
14034 return fold_convert_loc (loc, type, arg0);
14036 return NULL_TREE;
14038 default:
14039 return NULL_TREE;
14040 } /* switch (code) */
14043 /* Perform constant folding and related simplification of EXPR.
14044 The related simplifications include x*1 => x, x*0 => 0, etc.,
14045 and application of the associative law.
14046 NOP_EXPR conversions may be removed freely (as long as we
14047 are careful not to change the type of the overall expression).
14048 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14049 but we can constant-fold them if they have constant operands. */
14051 #ifdef ENABLE_FOLD_CHECKING
14052 # define fold(x) fold_1 (x)
14053 static tree fold_1 (tree);
14054 static
14055 #endif
14056 tree
14057 fold (tree expr)
14059 const tree t = expr;
14060 enum tree_code code = TREE_CODE (t);
14061 enum tree_code_class kind = TREE_CODE_CLASS (code);
14062 tree tem;
14063 location_t loc = EXPR_LOCATION (expr);
14065 /* Return right away if a constant. */
14066 if (kind == tcc_constant)
14067 return t;
14069 /* CALL_EXPR-like objects with variable numbers of operands are
14070 treated specially. */
14071 if (kind == tcc_vl_exp)
14073 if (code == CALL_EXPR)
14075 tem = fold_call_expr (loc, expr, false);
14076 return tem ? tem : expr;
14078 return expr;
14081 if (IS_EXPR_CODE_CLASS (kind))
14083 tree type = TREE_TYPE (t);
14084 tree op0, op1, op2;
14086 switch (TREE_CODE_LENGTH (code))
14088 case 1:
14089 op0 = TREE_OPERAND (t, 0);
14090 tem = fold_unary_loc (loc, code, type, op0);
14091 return tem ? tem : expr;
14092 case 2:
14093 op0 = TREE_OPERAND (t, 0);
14094 op1 = TREE_OPERAND (t, 1);
14095 tem = fold_binary_loc (loc, code, type, op0, op1);
14096 return tem ? tem : expr;
14097 case 3:
14098 op0 = TREE_OPERAND (t, 0);
14099 op1 = TREE_OPERAND (t, 1);
14100 op2 = TREE_OPERAND (t, 2);
14101 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14102 return tem ? tem : expr;
14103 default:
14104 break;
14108 switch (code)
14110 case ARRAY_REF:
14112 tree op0 = TREE_OPERAND (t, 0);
14113 tree op1 = TREE_OPERAND (t, 1);
14115 if (TREE_CODE (op1) == INTEGER_CST
14116 && TREE_CODE (op0) == CONSTRUCTOR
14117 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14119 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14120 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14121 unsigned HOST_WIDE_INT begin = 0;
14123 /* Find a matching index by means of a binary search. */
14124 while (begin != end)
14126 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14127 tree index = VEC_index (constructor_elt, elts, middle)->index;
14129 if (TREE_CODE (index) == INTEGER_CST
14130 && tree_int_cst_lt (index, op1))
14131 begin = middle + 1;
14132 else if (TREE_CODE (index) == INTEGER_CST
14133 && tree_int_cst_lt (op1, index))
14134 end = middle;
14135 else if (TREE_CODE (index) == RANGE_EXPR
14136 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14137 begin = middle + 1;
14138 else if (TREE_CODE (index) == RANGE_EXPR
14139 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14140 end = middle;
14141 else
14142 return VEC_index (constructor_elt, elts, middle)->value;
14146 return t;
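/* Illustrative instance (an editorial sketch, not part of the
   original source): for a constant constructor such as
   {[0] = 10, [5] = 60}, the binary search above folds an ARRAY_REF
   with constant index 5 to the value 60, while index 3 matches no
   element and the ARRAY_REF is returned unchanged.  */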
14149 case CONST_DECL:
14150 return fold (DECL_INITIAL (t));
14152 default:
14153 return t;
14154 } /* switch (code) */
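/* Illustrative sketch of a caller (editorial addition, not part of
   the original source; compiled out, names hypothetical): building
   "x * 1" and letting fold collapse it back to X via the x*1 => x
   rule.  */
#if 0
static tree
example_fold_mult_one (tree x)
{
  tree one = build_int_cst (TREE_TYPE (x), 1);
  return fold (build2 (MULT_EXPR, TREE_TYPE (x), x, one));
}
#endif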
14157 #ifdef ENABLE_FOLD_CHECKING
14158 #undef fold
14160 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14161 static void fold_check_failed (const_tree, const_tree);
14162 void print_fold_checksum (const_tree);
14164 /* When --enable-checking=fold, compute a digest of expr before
14165 and after actual fold call to see if fold did not accidentally
14166 change original expr. */
14168 tree
14169 fold (tree expr)
14171 tree ret;
14172 struct md5_ctx ctx;
14173 unsigned char checksum_before[16], checksum_after[16];
14174 htab_t ht;
14176 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14177 md5_init_ctx (&ctx);
14178 fold_checksum_tree (expr, &ctx, ht);
14179 md5_finish_ctx (&ctx, checksum_before);
14180 htab_empty (ht);
14182 ret = fold_1 (expr);
14184 md5_init_ctx (&ctx);
14185 fold_checksum_tree (expr, &ctx, ht);
14186 md5_finish_ctx (&ctx, checksum_after);
14187 htab_delete (ht);
14189 if (memcmp (checksum_before, checksum_after, 16))
14190 fold_check_failed (expr, ret);
14192 return ret;
14195 void
14196 print_fold_checksum (const_tree expr)
14198 struct md5_ctx ctx;
14199 unsigned char checksum[16], cnt;
14200 htab_t ht;
14202 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14203 md5_init_ctx (&ctx);
14204 fold_checksum_tree (expr, &ctx, ht);
14205 md5_finish_ctx (&ctx, checksum);
14206 htab_delete (ht);
14207 for (cnt = 0; cnt < 16; ++cnt)
14208 fprintf (stderr, "%02x", checksum[cnt]);
14209 putc ('\n', stderr);
14212 static void
14213 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14215 internal_error ("fold check: original tree changed by fold");
14218 static void
14219 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14221 const void **slot;
14222 enum tree_code code;
14223 union tree_node buf;
14224 int i, len;
14226 recursive_label:
14228 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14229 <= sizeof (struct tree_function_decl))
14230 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14231 if (expr == NULL)
14232 return;
14233 slot = (const void **) htab_find_slot (ht, expr, INSERT);
14234 if (*slot != NULL)
14235 return;
14236 *slot = expr;
14237 code = TREE_CODE (expr);
14238 if (TREE_CODE_CLASS (code) == tcc_declaration
14239 && DECL_ASSEMBLER_NAME_SET_P (expr))
14241 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14242 memcpy ((char *) &buf, expr, tree_size (expr));
14243 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14244 expr = (tree) &buf;
14246 else if (TREE_CODE_CLASS (code) == tcc_type
14247 && (TYPE_POINTER_TO (expr)
14248 || TYPE_REFERENCE_TO (expr)
14249 || TYPE_CACHED_VALUES_P (expr)
14250 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14251 || TYPE_NEXT_VARIANT (expr)))
14253 /* Allow these fields to be modified. */
14254 tree tmp;
14255 memcpy ((char *) &buf, expr, tree_size (expr));
14256 expr = tmp = (tree) &buf;
14257 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14258 TYPE_POINTER_TO (tmp) = NULL;
14259 TYPE_REFERENCE_TO (tmp) = NULL;
14260 TYPE_NEXT_VARIANT (tmp) = NULL;
14261 if (TYPE_CACHED_VALUES_P (tmp))
14263 TYPE_CACHED_VALUES_P (tmp) = 0;
14264 TYPE_CACHED_VALUES (tmp) = NULL;
14267 md5_process_bytes (expr, tree_size (expr), ctx);
14268 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14269 if (TREE_CODE_CLASS (code) != tcc_type
14270 && TREE_CODE_CLASS (code) != tcc_declaration
14271 && code != TREE_LIST
14272 && code != SSA_NAME)
14273 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14274 switch (TREE_CODE_CLASS (code))
14276 case tcc_constant:
14277 switch (code)
14279 case STRING_CST:
14280 md5_process_bytes (TREE_STRING_POINTER (expr),
14281 TREE_STRING_LENGTH (expr), ctx);
14282 break;
14283 case COMPLEX_CST:
14284 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14285 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14286 break;
14287 case VECTOR_CST:
14288 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14289 break;
14290 default:
14291 break;
14293 break;
14294 case tcc_exceptional:
14295 switch (code)
14297 case TREE_LIST:
14298 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14299 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14300 expr = TREE_CHAIN (expr);
14301 goto recursive_label;
14302 break;
14303 case TREE_VEC:
14304 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14305 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14306 break;
14307 default:
14308 break;
14310 break;
14311 case tcc_expression:
14312 case tcc_reference:
14313 case tcc_comparison:
14314 case tcc_unary:
14315 case tcc_binary:
14316 case tcc_statement:
14317 case tcc_vl_exp:
14318 len = TREE_OPERAND_LENGTH (expr);
14319 for (i = 0; i < len; ++i)
14320 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14321 break;
14322 case tcc_declaration:
14323 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14324 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14325 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14327 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14328 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14329 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14330 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14331 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14333 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14334 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14336 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14338 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14339 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14340 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14342 break;
14343 case tcc_type:
14344 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14345 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14346 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14347 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14348 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14349 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14350 if (INTEGRAL_TYPE_P (expr)
14351 || SCALAR_FLOAT_TYPE_P (expr))
14353 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14354 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14356 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14357 if (TREE_CODE (expr) == RECORD_TYPE
14358 || TREE_CODE (expr) == UNION_TYPE
14359 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14360 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14361 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14362 break;
14363 default:
14364 break;
14368 /* Helper function for outputting the checksum of a tree T. When
14369 debugging with gdb, you can "define mynext" to be "next" followed
14370 by "call debug_fold_checksum (op0)", then just trace down till the
14371 outputs differ. */
14373 void
14374 debug_fold_checksum (const_tree t)
14376 int i;
14377 unsigned char checksum[16];
14378 struct md5_ctx ctx;
14379 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14381 md5_init_ctx (&ctx);
14382 fold_checksum_tree (t, &ctx, ht);
14383 md5_finish_ctx (&ctx, checksum);
14384 htab_empty (ht);
14386 for (i = 0; i < 16; i++)
14387 fprintf (stderr, "%d ", checksum[i]);
14389 fprintf (stderr, "\n");
14392 #endif
14394 /* Fold a unary tree expression with code CODE of type TYPE with an
14395 operand OP0. LOC is the location of the resulting expression.
14396 Return a folded expression if successful. Otherwise, return a tree
14397 expression with code CODE of type TYPE with an operand OP0. */
14399 tree
14400 fold_build1_stat_loc (location_t loc,
14401 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14403 tree tem;
14404 #ifdef ENABLE_FOLD_CHECKING
14405 unsigned char checksum_before[16], checksum_after[16];
14406 struct md5_ctx ctx;
14407 htab_t ht;
14409 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14410 md5_init_ctx (&ctx);
14411 fold_checksum_tree (op0, &ctx, ht);
14412 md5_finish_ctx (&ctx, checksum_before);
14413 htab_empty (ht);
14414 #endif
14416 tem = fold_unary_loc (loc, code, type, op0);
14417 if (!tem)
14419 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14420 SET_EXPR_LOCATION (tem, loc);
14423 #ifdef ENABLE_FOLD_CHECKING
14424 md5_init_ctx (&ctx);
14425 fold_checksum_tree (op0, &ctx, ht);
14426 md5_finish_ctx (&ctx, checksum_after);
14427 htab_delete (ht);
14429 if (memcmp (checksum_before, checksum_after, 16))
14430 fold_check_failed (op0, tem);
14431 #endif
14432 return tem;
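/* Illustrative sketch (editorial addition, not part of the original
   source; compiled out, names hypothetical): folding a double
   negation through the fold_build1_loc wrapper.  For types where the
   simplification is safe, -(-X) comes back as X.  */
#if 0
static tree
example_fold_double_negate (location_t loc, tree x)
{
  tree neg = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (x), x);
  return fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (x), neg);
}
#endif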
14435 /* Fold a binary tree expression with code CODE of type TYPE with
14436 operands OP0 and OP1. LOC is the location of the resulting
14437 expression. Return a folded expression if successful. Otherwise,
14438 return a tree expression with code CODE of type TYPE with operands
14439 OP0 and OP1. */
14441 tree
14442 fold_build2_stat_loc (location_t loc,
14443 enum tree_code code, tree type, tree op0, tree op1
14444 MEM_STAT_DECL)
14446 tree tem;
14447 #ifdef ENABLE_FOLD_CHECKING
14448 unsigned char checksum_before_op0[16],
14449 checksum_before_op1[16],
14450 checksum_after_op0[16],
14451 checksum_after_op1[16];
14452 struct md5_ctx ctx;
14453 htab_t ht;
14455 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14456 md5_init_ctx (&ctx);
14457 fold_checksum_tree (op0, &ctx, ht);
14458 md5_finish_ctx (&ctx, checksum_before_op0);
14459 htab_empty (ht);
14461 md5_init_ctx (&ctx);
14462 fold_checksum_tree (op1, &ctx, ht);
14463 md5_finish_ctx (&ctx, checksum_before_op1);
14464 htab_empty (ht);
14465 #endif
14467 tem = fold_binary_loc (loc, code, type, op0, op1);
14468 if (!tem)
14470 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14471 SET_EXPR_LOCATION (tem, loc);
14474 #ifdef ENABLE_FOLD_CHECKING
14475 md5_init_ctx (&ctx);
14476 fold_checksum_tree (op0, &ctx, ht);
14477 md5_finish_ctx (&ctx, checksum_after_op0);
14478 htab_empty (ht);
14480 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14481 fold_check_failed (op0, tem);
14483 md5_init_ctx (&ctx);
14484 fold_checksum_tree (op1, &ctx, ht);
14485 md5_finish_ctx (&ctx, checksum_after_op1);
14486 htab_delete (ht);
14488 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14489 fold_check_failed (op1, tem);
14490 #endif
14491 return tem;
14494 /* Fold a ternary tree expression with code CODE of type TYPE with
14495 operands OP0, OP1, and OP2. Return a folded expression if
14496 successful. Otherwise, return a tree expression with code CODE of
14497 type TYPE with operands OP0, OP1, and OP2. */
14499 tree
14500 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14501 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14503 tree tem;
14504 #ifdef ENABLE_FOLD_CHECKING
14505 unsigned char checksum_before_op0[16],
14506 checksum_before_op1[16],
14507 checksum_before_op2[16],
14508 checksum_after_op0[16],
14509 checksum_after_op1[16],
14510 checksum_after_op2[16];
14511 struct md5_ctx ctx;
14512 htab_t ht;
14514 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14515 md5_init_ctx (&ctx);
14516 fold_checksum_tree (op0, &ctx, ht);
14517 md5_finish_ctx (&ctx, checksum_before_op0);
14518 htab_empty (ht);
14520 md5_init_ctx (&ctx);
14521 fold_checksum_tree (op1, &ctx, ht);
14522 md5_finish_ctx (&ctx, checksum_before_op1);
14523 htab_empty (ht);
14525 md5_init_ctx (&ctx);
14526 fold_checksum_tree (op2, &ctx, ht);
14527 md5_finish_ctx (&ctx, checksum_before_op2);
14528 htab_empty (ht);
14529 #endif
14531 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14532 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14533 if (!tem)
14535 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14536 SET_EXPR_LOCATION (tem, loc);
14539 #ifdef ENABLE_FOLD_CHECKING
14540 md5_init_ctx (&ctx);
14541 fold_checksum_tree (op0, &ctx, ht);
14542 md5_finish_ctx (&ctx, checksum_after_op0);
14543 htab_empty (ht);
14545 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14546 fold_check_failed (op0, tem);
14548 md5_init_ctx (&ctx);
14549 fold_checksum_tree (op1, &ctx, ht);
14550 md5_finish_ctx (&ctx, checksum_after_op1);
14551 htab_empty (ht);
14553 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14554 fold_check_failed (op1, tem);
14556 md5_init_ctx (&ctx);
14557 fold_checksum_tree (op2, &ctx, ht);
14558 md5_finish_ctx (&ctx, checksum_after_op2);
14559 htab_delete (ht);
14561 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14562 fold_check_failed (op2, tem);
14563 #endif
14564 return tem;
14567 /* Fold a CALL_EXPR expression of type TYPE with function operand FN,
14568 NARGS arguments in ARGARRAY, and a null static chain.
14569 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14570 of type TYPE from the given operands as constructed by build_call_array. */
14572 tree
14573 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14574 int nargs, tree *argarray)
14576 tree tem;
14577 #ifdef ENABLE_FOLD_CHECKING
14578 unsigned char checksum_before_fn[16],
14579 checksum_before_arglist[16],
14580 checksum_after_fn[16],
14581 checksum_after_arglist[16];
14582 struct md5_ctx ctx;
14583 htab_t ht;
14584 int i;
14586 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14587 md5_init_ctx (&ctx);
14588 fold_checksum_tree (fn, &ctx, ht);
14589 md5_finish_ctx (&ctx, checksum_before_fn);
14590 htab_empty (ht);
14592 md5_init_ctx (&ctx);
14593 for (i = 0; i < nargs; i++)
14594 fold_checksum_tree (argarray[i], &ctx, ht);
14595 md5_finish_ctx (&ctx, checksum_before_arglist);
14596 htab_empty (ht);
14597 #endif
14599 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14601 #ifdef ENABLE_FOLD_CHECKING
14602 md5_init_ctx (&ctx);
14603 fold_checksum_tree (fn, &ctx, ht);
14604 md5_finish_ctx (&ctx, checksum_after_fn);
14605 htab_empty (ht);
14607 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14608 fold_check_failed (fn, tem);
14610 md5_init_ctx (&ctx);
14611 for (i = 0; i < nargs; i++)
14612 fold_checksum_tree (argarray[i], &ctx, ht);
14613 md5_finish_ctx (&ctx, checksum_after_arglist);
14614 htab_delete (ht);
14616 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14617 fold_check_failed (NULL_TREE, tem);
14618 #endif
14619 return tem;
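/* Illustrative sketch (editorial addition, not part of the original
   source; compiled out).  Folding fabs (-1.0) through the array-based
   entry point; the use of built_in_decls and dconstm1 here is an
   assumption about the surrounding tree, not something this file
   guarantees.  */
#if 0
static tree
example_fold_fabs_call (location_t loc)
{
  tree fn = build_fold_addr_expr (built_in_decls[BUILT_IN_FABS]);
  tree arg = build_real (double_type_node, dconstm1);
  /* Folds to the REAL_CST 1.0.  */
  return fold_build_call_array_loc (loc, double_type_node, fn, 1, &arg);
}
#endif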
14622 /* Perform constant folding and related simplification of initializer
14623 expression EXPR. These behave identically to "fold_buildN" but ignore
14624 potential run-time traps and exceptions that fold must preserve. */
14626 #define START_FOLD_INIT \
14627 int saved_signaling_nans = flag_signaling_nans;\
14628 int saved_trapping_math = flag_trapping_math;\
14629 int saved_rounding_math = flag_rounding_math;\
14630 int saved_trapv = flag_trapv;\
14631 int saved_folding_initializer = folding_initializer;\
14632 flag_signaling_nans = 0;\
14633 flag_trapping_math = 0;\
14634 flag_rounding_math = 0;\
14635 flag_trapv = 0;\
14636 folding_initializer = 1;
14638 #define END_FOLD_INIT \
14639 flag_signaling_nans = saved_signaling_nans;\
14640 flag_trapping_math = saved_trapping_math;\
14641 flag_rounding_math = saved_rounding_math;\
14642 flag_trapv = saved_trapv;\
14643 folding_initializer = saved_folding_initializer;
14645 tree
14646 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14647 tree type, tree op)
14649 tree result;
14650 START_FOLD_INIT;
14652 result = fold_build1_loc (loc, code, type, op);
14654 END_FOLD_INIT;
14655 return result;
14658 tree
14659 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14660 tree type, tree op0, tree op1)
14662 tree result;
14663 START_FOLD_INIT;
14665 result = fold_build2_loc (loc, code, type, op0, op1);
14667 END_FOLD_INIT;
14668 return result;
14671 tree
14672 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14673 tree type, tree op0, tree op1, tree op2)
14675 tree result;
14676 START_FOLD_INIT;
14678 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14680 END_FOLD_INIT;
14681 return result;
14684 tree
14685 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14686 int nargs, tree *argarray)
14688 tree result;
14689 START_FOLD_INIT;
14691 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14693 END_FOLD_INIT;
14694 return result;
14697 #undef START_FOLD_INIT
14698 #undef END_FOLD_INIT
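/* Illustrative sketch (editorial addition, not part of the original
   source; compiled out, names hypothetical): the initializer variants
   let a front end fold "a / b" inside a static initializer even when
   -frounding-math would otherwise force fold to keep the division for
   run time.  */
#if 0
static tree
example_fold_static_division (location_t loc, tree a, tree b)
{
  return fold_build2_initializer_loc (loc, RDIV_EXPR,
                                      double_type_node, a, b);
}
#endif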
14700 /* Determine whether the first argument is a multiple of the second argument. Return 0 if
14701 it is not, or if we cannot easily determine it to be.
14703 An example of the sort of thing we care about (at this point; this routine
14704 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14705 fold cases do now) is discovering that
14707 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14709 is a multiple of
14711 SAVE_EXPR (J * 8)
14713 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14715 This code also handles discovering that
14717 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14719 is a multiple of 8 so we don't have to worry about dealing with a
14720 possible remainder.
14722 Note that we *look* inside a SAVE_EXPR only to determine how it was
14723 calculated; it is not safe for fold to do much of anything else with the
14724 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14725 at run time. For example, the latter example above *cannot* be implemented
14726 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14727 evaluation time of the original SAVE_EXPR is not necessarily the same at
14728 the time the new expression is evaluated. The only optimization of this
14729 sort that would be valid is changing
14731 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14733 divided by 8 to
14735 SAVE_EXPR (I) * SAVE_EXPR (J)
14737 (where the same SAVE_EXPR (J) is used in the original and the
14738 transformed version). */
14740 static int
14741 multiple_of_p (tree type, const_tree top, const_tree bottom)
14743 if (operand_equal_p (top, bottom, 0))
14744 return 1;
14746 if (TREE_CODE (type) != INTEGER_TYPE)
14747 return 0;
14749 switch (TREE_CODE (top))
14751 case BIT_AND_EXPR:
14752 /* Bitwise and provides a power of two multiple. If the mask is
14753 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14754 if (!integer_pow2p (bottom))
14755 return 0;
14756 /* FALLTHRU */
14758 case MULT_EXPR:
14759 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14760 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14762 case PLUS_EXPR:
14763 case MINUS_EXPR:
14764 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14765 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14767 case LSHIFT_EXPR:
14768 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14770 tree op1, t1;
14772 op1 = TREE_OPERAND (top, 1);
14773 /* const_binop may not detect overflow correctly,
14774 so check for it explicitly here. */
14775 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14776 > TREE_INT_CST_LOW (op1)
14777 && TREE_INT_CST_HIGH (op1) == 0
14778 && 0 != (t1 = fold_convert (type,
14779 const_binop (LSHIFT_EXPR,
14780 size_one_node,
14781 op1, 0)))
14782 && !TREE_OVERFLOW (t1))
14783 return multiple_of_p (type, t1, bottom);
14785 return 0;
14787 case NOP_EXPR:
14788 /* Can't handle conversions from non-integral or wider integral type. */
14789 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14790 || (TYPE_PRECISION (type)
14791 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14792 return 0;
14794 /* ... fall through ... */
14796 case SAVE_EXPR:
14797 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14799 case INTEGER_CST:
14800 if (TREE_CODE (bottom) != INTEGER_CST
14801 || integer_zerop (bottom)
14802 || (TYPE_UNSIGNED (type)
14803 && (tree_int_cst_sgn (top) < 0
14804 || tree_int_cst_sgn (bottom) < 0)))
14805 return 0;
14806 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14807 top, bottom, 0));
14809 default:
14810 return 0;
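/* Illustrative sketch (editorial addition, not part of the original
   source; compiled out, names hypothetical): asking whether TOP is a
   multiple of 8.  A TOP such as (J * 8) + 16 yields 1, since both
   addends are multiples of 8; J * 4 yields 0.  */
#if 0
static int
example_multiple_of_eight (tree top)
{
  return multiple_of_p (integer_type_node, top,
                        build_int_cst (integer_type_node, 8));
}
#endif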
14814 /* Return true if CODE or TYPE is known to be non-negative. */
14816 static bool
14817 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14819 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14820 && truth_value_p (code))
14821 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14822 have a signed:1 type (where the values are -1 and 0). */
14823 return true;
14824 return false;
14827 /* Return true if (CODE OP0) is known to be non-negative. If the return
14828 value is based on the assumption that signed overflow is undefined,
14829 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14830 *STRICT_OVERFLOW_P. */
14832 bool
14833 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14834 bool *strict_overflow_p)
14836 if (TYPE_UNSIGNED (type))
14837 return true;
14839 switch (code)
14841 case ABS_EXPR:
14842 /* We can't return 1 if flag_wrapv is set because
14843 ABS_EXPR<INT_MIN> = INT_MIN. */
14844 if (!INTEGRAL_TYPE_P (type))
14845 return true;
14846 if (TYPE_OVERFLOW_UNDEFINED (type))
14848 *strict_overflow_p = true;
14849 return true;
14851 break;
14853 case NON_LVALUE_EXPR:
14854 case FLOAT_EXPR:
14855 case FIX_TRUNC_EXPR:
14856 return tree_expr_nonnegative_warnv_p (op0,
14857 strict_overflow_p);
14859 case NOP_EXPR:
14861 tree inner_type = TREE_TYPE (op0);
14862 tree outer_type = type;
14864 if (TREE_CODE (outer_type) == REAL_TYPE)
14866 if (TREE_CODE (inner_type) == REAL_TYPE)
14867 return tree_expr_nonnegative_warnv_p (op0,
14868 strict_overflow_p);
14869 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14871 if (TYPE_UNSIGNED (inner_type))
14872 return true;
14873 return tree_expr_nonnegative_warnv_p (op0,
14874 strict_overflow_p);
14877 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14879 if (TREE_CODE (inner_type) == REAL_TYPE)
14880 return tree_expr_nonnegative_warnv_p (op0,
14881 strict_overflow_p);
14882 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14883 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14884 && TYPE_UNSIGNED (inner_type);
14887 break;
14889 default:
14890 return tree_simple_nonnegative_warnv_p (code, type);
14893 /* We don't know sign of `t', so be conservative and return false. */
14894 return false;
14897 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14898 value is based on the assumption that signed overflow is undefined,
14899 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14900 *STRICT_OVERFLOW_P. */
14902 bool
14903 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14904 tree op1, bool *strict_overflow_p)
14906 if (TYPE_UNSIGNED (type))
14907 return true;
14909 switch (code)
14911 case POINTER_PLUS_EXPR:
14912 case PLUS_EXPR:
14913 if (FLOAT_TYPE_P (type))
14914 return (tree_expr_nonnegative_warnv_p (op0,
14915 strict_overflow_p)
14916 && tree_expr_nonnegative_warnv_p (op1,
14917 strict_overflow_p));
14919 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14920 both unsigned and at least 2 bits shorter than the result. */
14921 if (TREE_CODE (type) == INTEGER_TYPE
14922 && TREE_CODE (op0) == NOP_EXPR
14923 && TREE_CODE (op1) == NOP_EXPR)
14925 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14926 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14927 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14928 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14930 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14931 TYPE_PRECISION (inner2)) + 1;
14932 return prec < TYPE_PRECISION (type);
14935 break;
14937 case MULT_EXPR:
14938 if (FLOAT_TYPE_P (type))
14940 /* x * x for floating point x is always non-negative. */
14941 if (operand_equal_p (op0, op1, 0))
14942 return true;
14943 return (tree_expr_nonnegative_warnv_p (op0,
14944 strict_overflow_p)
14945 && tree_expr_nonnegative_warnv_p (op1,
14946 strict_overflow_p));
14949 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14950 both unsigned and their total bit-width is less than that of the result. */
14951 if (TREE_CODE (type) == INTEGER_TYPE
14952 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14953 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14955 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14956 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14957 : TREE_TYPE (op0);
14958 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14959 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14960 : TREE_TYPE (op1);
14962 bool unsigned0 = TYPE_UNSIGNED (inner0);
14963 bool unsigned1 = TYPE_UNSIGNED (inner1);
14965 if (TREE_CODE (op0) == INTEGER_CST)
14966 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14968 if (TREE_CODE (op1) == INTEGER_CST)
14969 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14971 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14972 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14974 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14975 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14976 : TYPE_PRECISION (inner0);
14978 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14979 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14980 : TYPE_PRECISION (inner1);
14982 return precision0 + precision1 < TYPE_PRECISION (type);
14985 return false;
14987 case BIT_AND_EXPR:
14988 case MAX_EXPR:
14989 return (tree_expr_nonnegative_warnv_p (op0,
14990 strict_overflow_p)
14991 || tree_expr_nonnegative_warnv_p (op1,
14992 strict_overflow_p));
14994 case BIT_IOR_EXPR:
14995 case BIT_XOR_EXPR:
14996 case MIN_EXPR:
14997 case RDIV_EXPR:
14998 case TRUNC_DIV_EXPR:
14999 case CEIL_DIV_EXPR:
15000 case FLOOR_DIV_EXPR:
15001 case ROUND_DIV_EXPR:
15002 return (tree_expr_nonnegative_warnv_p (op0,
15003 strict_overflow_p)
15004 && tree_expr_nonnegative_warnv_p (op1,
15005 strict_overflow_p));
15007 case TRUNC_MOD_EXPR:
15008 case CEIL_MOD_EXPR:
15009 case FLOOR_MOD_EXPR:
15010 case ROUND_MOD_EXPR:
15011 return tree_expr_nonnegative_warnv_p (op0,
15012 strict_overflow_p);
15013 default:
15014 return tree_simple_nonnegative_warnv_p (code, type);
15017 /* We don't know sign of `t', so be conservative and return false. */
15018 return false;
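/* Worked instance of the zero-extension rules above (an editorial
   note, not part of the original source): if X and Y are unsigned
   8-bit values widened to a signed 32-bit int, then X + Y needs at
   most 9 bits and X * Y at most 16, so both results stay below the
   sign bit and are known non-negative.  */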
15021 /* Return true if T is known to be non-negative. If the return
15022 value is based on the assumption that signed overflow is undefined,
15023 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15024 *STRICT_OVERFLOW_P. */
15026 bool
15027 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15029 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15030 return true;
15032 switch (TREE_CODE (t))
15034 case INTEGER_CST:
15035 return tree_int_cst_sgn (t) >= 0;
15037 case REAL_CST:
15038 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15040 case FIXED_CST:
15041 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15043 case COND_EXPR:
15044 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15045 strict_overflow_p)
15046 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15047 strict_overflow_p));
15048 default:
15049 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15050 TREE_TYPE (t));
15052 /* We don't know sign of `t', so be conservative and return false. */
15053 return false;
15056 /* Return true if T is known to be non-negative. If the return
15057 value is based on the assumption that signed overflow is undefined,
15058 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15059 *STRICT_OVERFLOW_P. */
15061 bool
15062 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15063 tree arg0, tree arg1, bool *strict_overflow_p)
15065 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15066 switch (DECL_FUNCTION_CODE (fndecl))
15068 CASE_FLT_FN (BUILT_IN_ACOS):
15069 CASE_FLT_FN (BUILT_IN_ACOSH):
15070 CASE_FLT_FN (BUILT_IN_CABS):
15071 CASE_FLT_FN (BUILT_IN_COSH):
15072 CASE_FLT_FN (BUILT_IN_ERFC):
15073 CASE_FLT_FN (BUILT_IN_EXP):
15074 CASE_FLT_FN (BUILT_IN_EXP10):
15075 CASE_FLT_FN (BUILT_IN_EXP2):
15076 CASE_FLT_FN (BUILT_IN_FABS):
15077 CASE_FLT_FN (BUILT_IN_FDIM):
15078 CASE_FLT_FN (BUILT_IN_HYPOT):
15079 CASE_FLT_FN (BUILT_IN_POW10):
15080 CASE_INT_FN (BUILT_IN_FFS):
15081 CASE_INT_FN (BUILT_IN_PARITY):
15082 CASE_INT_FN (BUILT_IN_POPCOUNT):
15083 case BUILT_IN_BSWAP32:
15084 case BUILT_IN_BSWAP64:
15085 /* Always true. */
15086 return true;
15088 CASE_FLT_FN (BUILT_IN_SQRT):
15089 /* sqrt(-0.0) is -0.0. */
15090 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15091 return true;
15092 return tree_expr_nonnegative_warnv_p (arg0,
15093 strict_overflow_p);
15095 CASE_FLT_FN (BUILT_IN_ASINH):
15096 CASE_FLT_FN (BUILT_IN_ATAN):
15097 CASE_FLT_FN (BUILT_IN_ATANH):
15098 CASE_FLT_FN (BUILT_IN_CBRT):
15099 CASE_FLT_FN (BUILT_IN_CEIL):
15100 CASE_FLT_FN (BUILT_IN_ERF):
15101 CASE_FLT_FN (BUILT_IN_EXPM1):
15102 CASE_FLT_FN (BUILT_IN_FLOOR):
15103 CASE_FLT_FN (BUILT_IN_FMOD):
15104 CASE_FLT_FN (BUILT_IN_FREXP):
15105 CASE_FLT_FN (BUILT_IN_LCEIL):
15106 CASE_FLT_FN (BUILT_IN_LDEXP):
15107 CASE_FLT_FN (BUILT_IN_LFLOOR):
15108 CASE_FLT_FN (BUILT_IN_LLCEIL):
15109 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15110 CASE_FLT_FN (BUILT_IN_LLRINT):
15111 CASE_FLT_FN (BUILT_IN_LLROUND):
15112 CASE_FLT_FN (BUILT_IN_LRINT):
15113 CASE_FLT_FN (BUILT_IN_LROUND):
15114 CASE_FLT_FN (BUILT_IN_MODF):
15115 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15116 CASE_FLT_FN (BUILT_IN_RINT):
15117 CASE_FLT_FN (BUILT_IN_ROUND):
15118 CASE_FLT_FN (BUILT_IN_SCALB):
15119 CASE_FLT_FN (BUILT_IN_SCALBLN):
15120 CASE_FLT_FN (BUILT_IN_SCALBN):
15121 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15122 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15123 CASE_FLT_FN (BUILT_IN_SINH):
15124 CASE_FLT_FN (BUILT_IN_TANH):
15125 CASE_FLT_FN (BUILT_IN_TRUNC):
15126 /* True if the 1st argument is nonnegative. */
15127 return tree_expr_nonnegative_warnv_p (arg0,
15128 strict_overflow_p);
15130 CASE_FLT_FN (BUILT_IN_FMAX):
15131 /* True if the 1st OR 2nd arguments are nonnegative. */
15132 return (tree_expr_nonnegative_warnv_p (arg0,
15133 strict_overflow_p)
15134 || (tree_expr_nonnegative_warnv_p (arg1,
15135 strict_overflow_p)));
15137 CASE_FLT_FN (BUILT_IN_FMIN):
15138 /* True if the 1st AND 2nd arguments are nonnegative. */
15139 return (tree_expr_nonnegative_warnv_p (arg0,
15140 strict_overflow_p)
15141 && (tree_expr_nonnegative_warnv_p (arg1,
15142 strict_overflow_p)));
15144 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15145 /* True if the 2nd argument is nonnegative. */
15146 return tree_expr_nonnegative_warnv_p (arg1,
15147 strict_overflow_p);
15149 CASE_FLT_FN (BUILT_IN_POWI):
15150 /* True if the 1st argument is nonnegative or the second
15151 argument is an even integer. */
15152 if (TREE_CODE (arg1) == INTEGER_CST
15153 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15154 return true;
15155 return tree_expr_nonnegative_warnv_p (arg0,
15156 strict_overflow_p);
15158 CASE_FLT_FN (BUILT_IN_POW):
15159 /* True if the 1st argument is nonnegative or the second
15160 argument is an even integer valued real. */
15161 if (TREE_CODE (arg1) == REAL_CST)
15163 REAL_VALUE_TYPE c;
15164 HOST_WIDE_INT n;
15166 c = TREE_REAL_CST (arg1);
15167 n = real_to_integer (&c);
15168 if ((n & 1) == 0)
15170 REAL_VALUE_TYPE cint;
15171 real_from_integer (&cint, VOIDmode, n,
15172 n < 0 ? -1 : 0, 0);
15173 if (real_identical (&c, &cint))
15174 return true;
15177 return tree_expr_nonnegative_warnv_p (arg0,
15178 strict_overflow_p);
15180 default:
15181 break;
15183 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15184 type);
15187 /* Return true if T is known to be non-negative. If the return
15188 value is based on the assumption that signed overflow is undefined,
15189 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15190 *STRICT_OVERFLOW_P. */
15192 bool
15193 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15195 enum tree_code code = TREE_CODE (t);
15196 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15197 return true;
15199 switch (code)
15201 case TARGET_EXPR:
15203 tree temp = TARGET_EXPR_SLOT (t);
15204 t = TARGET_EXPR_INITIAL (t);
15206 /* If the initializer is non-void, then it's a normal expression
15207 that will be assigned to the slot. */
15208 if (!VOID_TYPE_P (t))
15209 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15211 /* Otherwise, the initializer sets the slot in some way. One common
15212 way is an assignment statement at the end of the initializer. */
15213 while (1)
15215 if (TREE_CODE (t) == BIND_EXPR)
15216 t = expr_last (BIND_EXPR_BODY (t));
15217 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15218 || TREE_CODE (t) == TRY_CATCH_EXPR)
15219 t = expr_last (TREE_OPERAND (t, 0));
15220 else if (TREE_CODE (t) == STATEMENT_LIST)
15221 t = expr_last (t);
15222 else
15223 break;
15225 if (TREE_CODE (t) == MODIFY_EXPR
15226 && TREE_OPERAND (t, 0) == temp)
15227 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15228 strict_overflow_p);
15230 return false;
15233 case CALL_EXPR:
15235 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15236 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15238 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15239 get_callee_fndecl (t),
15240 arg0,
15241 arg1,
15242 strict_overflow_p);
15244 case COMPOUND_EXPR:
15245 case MODIFY_EXPR:
15246 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15247 strict_overflow_p);
15248 case BIND_EXPR:
15249 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15250 strict_overflow_p);
15251 case SAVE_EXPR:
15252 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15253 strict_overflow_p);
15255 default:
15256 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15257 TREE_TYPE (t));
15260 /* We don't know sign of `t', so be conservative and return false. */
15261 return false;
15264 /* Return true if T is known to be non-negative. If the return
15265 value is based on the assumption that signed overflow is undefined,
15266 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15267 *STRICT_OVERFLOW_P. */
15269 bool
15270 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15272 enum tree_code code;
15273 if (t == error_mark_node)
15274 return false;
15276 code = TREE_CODE (t);
15277 switch (TREE_CODE_CLASS (code))
15279 case tcc_binary:
15280 case tcc_comparison:
15281 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15282 TREE_TYPE (t),
15283 TREE_OPERAND (t, 0),
15284 TREE_OPERAND (t, 1),
15285 strict_overflow_p);
15287 case tcc_unary:
15288 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15289 TREE_TYPE (t),
15290 TREE_OPERAND (t, 0),
15291 strict_overflow_p);
15293 case tcc_constant:
15294 case tcc_declaration:
15295 case tcc_reference:
15296 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15298 default:
15299 break;
15302 switch (code)
15304 case TRUTH_AND_EXPR:
15305 case TRUTH_OR_EXPR:
15306 case TRUTH_XOR_EXPR:
15307 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15308 TREE_TYPE (t),
15309 TREE_OPERAND (t, 0),
15310 TREE_OPERAND (t, 1),
15311 strict_overflow_p);
15312 case TRUTH_NOT_EXPR:
15313 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15314 TREE_TYPE (t),
15315 TREE_OPERAND (t, 0),
15316 strict_overflow_p);
15318 case COND_EXPR:
15319 case CONSTRUCTOR:
15320 case OBJ_TYPE_REF:
15321 case ASSERT_EXPR:
15322 case ADDR_EXPR:
15323 case WITH_SIZE_EXPR:
15324 case SSA_NAME:
15325 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15327 default:
15328 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15332 /* Return true if `t' is known to be non-negative. Handle warnings
15333 about undefined signed overflow. */
15335 bool
15336 tree_expr_nonnegative_p (tree t)
15338 bool ret, strict_overflow_p;
15340 strict_overflow_p = false;
15341 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15342 if (strict_overflow_p)
15343 fold_overflow_warning (("assuming signed overflow does not occur when "
15344 "determining that expression is always "
15345 "non-negative"),
15346 WARN_STRICT_OVERFLOW_MISC);
15347 return ret;
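/* Illustrative sketch (editorial addition, not part of the original
   source; compiled out, names hypothetical): querying an ABS_EXPR.
   For a signed integral type the "true" answer relies on overflow
   being undefined (ABS_EXPR <INT_MIN> wraps back to INT_MIN), which
   is exactly what triggers the -Wstrict-overflow note above.  */
#if 0
static bool
example_abs_is_nonnegative (tree x)
{
  tree ax = fold_build1 (ABS_EXPR, TREE_TYPE (x), x);
  return tree_expr_nonnegative_p (ax);
}
#endif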
15351 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15352 For floating point we further ensure that T is not denormal.
15353 Similar logic is present in nonzero_address in rtlanal.h.
15355 If the return value is based on the assumption that signed overflow
15356 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15357 change *STRICT_OVERFLOW_P. */
15359 bool
15360 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15361 bool *strict_overflow_p)
15363 switch (code)
15365 case ABS_EXPR:
15366 return tree_expr_nonzero_warnv_p (op0,
15367 strict_overflow_p);
15369 case NOP_EXPR:
15371 tree inner_type = TREE_TYPE (op0);
15372 tree outer_type = type;
15374 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15375 && tree_expr_nonzero_warnv_p (op0,
15376 strict_overflow_p));
15378 break;
15380 case NON_LVALUE_EXPR:
15381 return tree_expr_nonzero_warnv_p (op0,
15382 strict_overflow_p);
15384 default:
15385 break;
15388 return false;
15391 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15392 For floating point we further ensure that T is not denormal.
15393 Similar logic is present in nonzero_address in rtlanal.h.
15395 If the return value is based on the assumption that signed overflow
15396 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15397 change *STRICT_OVERFLOW_P. */
15399 bool
15400 tree_binary_nonzero_warnv_p (enum tree_code code,
15401 tree type,
15402 tree op0,
15403 tree op1, bool *strict_overflow_p)
15405 bool sub_strict_overflow_p;
15406 switch (code)
15408 case POINTER_PLUS_EXPR:
15409 case PLUS_EXPR:
15410 if (TYPE_OVERFLOW_UNDEFINED (type))
15412 /* In the presence of negative values it is hard
15413 to say anything. */
15414 sub_strict_overflow_p = false;
15415 if (!tree_expr_nonnegative_warnv_p (op0,
15416 &sub_strict_overflow_p)
15417 || !tree_expr_nonnegative_warnv_p (op1,
15418 &sub_strict_overflow_p))
15419 return false;
15420 /* One of the operands must be positive and the other non-negative. */
15421 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15422 overflows, on a twos-complement machine the sum of two
15423 nonnegative numbers can never be zero. */
15424 return (tree_expr_nonzero_warnv_p (op0,
15425 strict_overflow_p)
15426 || tree_expr_nonzero_warnv_p (op1,
15427 strict_overflow_p));
15429 break;
15431 case MULT_EXPR:
15432 if (TYPE_OVERFLOW_UNDEFINED (type))
15434 if (tree_expr_nonzero_warnv_p (op0,
15435 strict_overflow_p)
15436 && tree_expr_nonzero_warnv_p (op1,
15437 strict_overflow_p))
15439 *strict_overflow_p = true;
15440 return true;
15443 break;
15445 case MIN_EXPR:
15446 sub_strict_overflow_p = false;
15447 if (tree_expr_nonzero_warnv_p (op0,
15448 &sub_strict_overflow_p)
15449 && tree_expr_nonzero_warnv_p (op1,
15450 &sub_strict_overflow_p))
15452 if (sub_strict_overflow_p)
15453 *strict_overflow_p = true;
15455 break;
15457 case MAX_EXPR:
15458 sub_strict_overflow_p = false;
15459 if (tree_expr_nonzero_warnv_p (op0,
15460 &sub_strict_overflow_p))
15462 if (sub_strict_overflow_p)
15463 *strict_overflow_p = true;
15465 /* When both operands are nonzero, then MAX must be too. */
15466 if (tree_expr_nonzero_warnv_p (op1,
15467 strict_overflow_p))
15468 return true;
15470 /* MAX where operand 0 is positive is positive. */
15471 return tree_expr_nonnegative_warnv_p (op0,
15472 strict_overflow_p);
15474 /* MAX where operand 1 is positive is positive. */
15475 else if (tree_expr_nonzero_warnv_p (op1,
15476 &sub_strict_overflow_p)
15477 && tree_expr_nonnegative_warnv_p (op1,
15478 &sub_strict_overflow_p))
15480 if (sub_strict_overflow_p)
15481 *strict_overflow_p = true;
15482 return true;
15484 break;
15486 case BIT_IOR_EXPR:
15487 return (tree_expr_nonzero_warnv_p (op1,
15488 strict_overflow_p)
15489 || tree_expr_nonzero_warnv_p (op0,
15490 strict_overflow_p));
15492 default:
15493 break;
15496 return false;
15499 /* Return true when T is an address and is known to be nonzero.
15500 For floating point we further ensure that T is not denormal.
15501 Similar logic is present in nonzero_address in rtlanal.h.
15503 If the return value is based on the assumption that signed overflow
15504 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15505 change *STRICT_OVERFLOW_P. */
15507 bool
15508 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15510 bool sub_strict_overflow_p;
15511 switch (TREE_CODE (t))
15513 case INTEGER_CST:
15514 return !integer_zerop (t);
15516 case ADDR_EXPR:
15518 tree base = get_base_address (TREE_OPERAND (t, 0));
15520 if (!base)
15521 return false;
15523 /* Weak declarations may link to NULL. Other things may also be NULL
15524 so protect with -fdelete-null-pointer-checks; but not variables
15525 allocated on the stack. */
15526 if (DECL_P (base)
15527 && (flag_delete_null_pointer_checks
15528 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15529 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15531 /* Constants are never weak. */
15532 if (CONSTANT_CLASS_P (base))
15533 return true;
15535 return false;
15538 case COND_EXPR:
15539 sub_strict_overflow_p = false;
15540 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15541 &sub_strict_overflow_p)
15542 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15543 &sub_strict_overflow_p))
15545 if (sub_strict_overflow_p)
15546 *strict_overflow_p = true;
15547 return true;
15549 break;
15551 default:
15552 break;
15554 return false;
15557 /* Return true when T is an address and is known to be nonzero.
15558 For floating point we further ensure that T is not denormal.
15559 Similar logic is present in nonzero_address in rtlanal.h.
15561 If the return value is based on the assumption that signed overflow
15562 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15563 change *STRICT_OVERFLOW_P. */
15565 bool
15566 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15568 tree type = TREE_TYPE (t);
15569 enum tree_code code;
15571 /* Doing something useful for floating point would need more work. */
15572 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15573 return false;
15575 code = TREE_CODE (t);
15576 switch (TREE_CODE_CLASS (code))
15578 case tcc_unary:
15579 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15580 strict_overflow_p);
15581 case tcc_binary:
15582 case tcc_comparison:
15583 return tree_binary_nonzero_warnv_p (code, type,
15584 TREE_OPERAND (t, 0),
15585 TREE_OPERAND (t, 1),
15586 strict_overflow_p);
15587 case tcc_constant:
15588 case tcc_declaration:
15589 case tcc_reference:
15590 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15592 default:
15593 break;
15596 switch (code)
15598 case TRUTH_NOT_EXPR:
15599 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15600 strict_overflow_p);
15602 case TRUTH_AND_EXPR:
15603 case TRUTH_OR_EXPR:
15604 case TRUTH_XOR_EXPR:
15605 return tree_binary_nonzero_warnv_p (code, type,
15606 TREE_OPERAND (t, 0),
15607 TREE_OPERAND (t, 1),
15608 strict_overflow_p);
15610 case COND_EXPR:
15611 case CONSTRUCTOR:
15612 case OBJ_TYPE_REF:
15613 case ASSERT_EXPR:
15614 case ADDR_EXPR:
15615 case WITH_SIZE_EXPR:
15616 case SSA_NAME:
15617 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15619 case COMPOUND_EXPR:
15620 case MODIFY_EXPR:
15621 case BIND_EXPR:
15622 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15623 strict_overflow_p);
15625 case SAVE_EXPR:
15626 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15627 strict_overflow_p);
15629 case CALL_EXPR:
15630 return alloca_call_p (t);
15632 default:
15633 break;
15635 return false;
15638 /* Return true when T is an address and is known to be nonzero.
15639 Handle warnings about undefined signed overflow. */
15641 bool
15642 tree_expr_nonzero_p (tree t)
15644 bool ret, strict_overflow_p;
15646 strict_overflow_p = false;
15647 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15648 if (strict_overflow_p)
15649 fold_overflow_warning (("assuming signed overflow does not occur when "
15650 "determining that expression is always "
15651 "non-zero"),
15652 WARN_STRICT_OVERFLOW_MISC);
15653 return ret;
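/* Illustrative sketch (editorial addition, not part of the original
   source; compiled out, names hypothetical): under
   -fdelete-null-pointer-checks the address of a non-weak decl is
   known nonzero, so a comparison of such an address against NULL can
   fold away.  */
#if 0
static bool
example_address_is_nonzero (tree static_var_decl)
{
  tree addr = build_fold_addr_expr (static_var_decl);
  return tree_expr_nonzero_p (addr);
}
#endif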
15656 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15657 attempt to fold the expression to a constant without modifying TYPE,
15658 OP0 or OP1.
15660 If the expression could be simplified to a constant, then return
15661 the constant. If the expression would not be simplified to a
15662 constant, then return NULL_TREE. */
15664 tree
15665 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15667 tree tem = fold_binary (code, type, op0, op1);
15668 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15671 /* Given the components of a unary expression CODE, TYPE and OP0,
15672 attempt to fold the expression to a constant without modifying
15673 TYPE or OP0.
15675 If the expression could be simplified to a constant, then return
15676 the constant. If the expression would not be simplified to a
15677 constant, then return NULL_TREE. */
15679 tree
15680 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15682 tree tem = fold_unary (code, type, op0);
15683 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
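/* Illustrative sketch (editorial addition, not part of the original
   source; compiled out): constant arithmetic through the
   *_to_constant entry points.  */
#if 0
static tree
example_constant_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Yields the INTEGER_CST 5; NULL_TREE would be returned if the
     operands had not folded to a constant.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                  two, three);
}
#endif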
15686 /* If EXP represents referencing an element in a constant string
15687 (either via pointer arithmetic or array indexing), return the
15688 tree representing the value accessed, otherwise return NULL. */
15690 tree
15691 fold_read_from_constant_string (tree exp)
15693 if ((TREE_CODE (exp) == INDIRECT_REF
15694 || TREE_CODE (exp) == ARRAY_REF)
15695 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15697 tree exp1 = TREE_OPERAND (exp, 0);
15698 tree index;
15699 tree string;
15700 location_t loc = EXPR_LOCATION (exp);
15702 if (TREE_CODE (exp) == INDIRECT_REF)
15703 string = string_constant (exp1, &index);
15704 else
15706 tree low_bound = array_ref_low_bound (exp);
15707 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15709 /* Optimize the special-case of a zero lower bound.
15711 We convert the low_bound to sizetype to avoid some problems
15712 with constant folding. (E.g. suppose the lower bound is 1,
15713 and its mode is QI. Without the conversion, (ARRAY
15714 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15715 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15716 if (! integer_zerop (low_bound))
15717 index = size_diffop_loc (loc, index,
15718 fold_convert_loc (loc, sizetype, low_bound));
15720 string = exp1;
15723 if (string
15724 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15725 && TREE_CODE (string) == STRING_CST
15726 && TREE_CODE (index) == INTEGER_CST
15727 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15728 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15729 == MODE_INT)
15730 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15731 return build_int_cst_type (TREE_TYPE (exp),
15732 (TREE_STRING_POINTER (string)
15733 [TREE_INT_CST_LOW (index)]));
15735 return NULL;
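/* Worked instance (an editorial note, not part of the original
   source): for the expression "abc"[1] the STRING_CST is "abc" and
   the folded index is 1, so the routine returns the character
   constant 'b' with the type of EXP.  A non-constant or out-of-range
   index makes it return NULL instead.  */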
15738 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15739 an integer constant, real, or fixed-point constant.
15741 TYPE is the type of the result. */
15743 static tree
15744 fold_negate_const (tree arg0, tree type)
15746 tree t = NULL_TREE;
15748 switch (TREE_CODE (arg0))
15750 case INTEGER_CST:
15752 unsigned HOST_WIDE_INT low;
15753 HOST_WIDE_INT high;
15754 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15755 TREE_INT_CST_HIGH (arg0),
15756 &low, &high);
15757 t = force_fit_type_double (type, low, high, 1,
15758 (overflow | TREE_OVERFLOW (arg0))
15759 && !TYPE_UNSIGNED (type));
15760 break;
15763 case REAL_CST:
15764 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15765 break;
15767 case FIXED_CST:
15769 FIXED_VALUE_TYPE f;
15770 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15771 &(TREE_FIXED_CST (arg0)), NULL,
15772 TYPE_SATURATING (type));
15773 t = build_fixed (type, f);
15774 /* Propagate overflow flags. */
15775 if (overflow_p | TREE_OVERFLOW (arg0))
15776 TREE_OVERFLOW (t) = 1;
15777 break;
15780 default:
15781 gcc_unreachable ();
15784 return t;
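/* Illustrative sketch (editorial addition, not part of the original
   source; compiled out, names hypothetical): negating the most
   negative value of a signed TYPE overflows, so the result comes
   back with TREE_OVERFLOW set by force_fit_type_double.  */
#if 0
static tree
example_negate_type_min (tree type)
{
  return fold_negate_const (TYPE_MIN_VALUE (type), type);
}
#endif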
15787 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15788 an integer constant or real constant.
15790 TYPE is the type of the result. */
15792 tree
15793 fold_abs_const (tree arg0, tree type)
15795 tree t = NULL_TREE;
15797 switch (TREE_CODE (arg0))
15799 case INTEGER_CST:
15800 /* If the value is unsigned, then the absolute value is
15801 the same as the ordinary value. */
15802 if (TYPE_UNSIGNED (type))
15803 t = arg0;
15804 /* Similarly, if the value is non-negative. */
15805 else if (INT_CST_LT (integer_minus_one_node, arg0))
15806 t = arg0;
15807 /* If the value is negative, then the absolute value is
15808 its negation. */
15809 else
15811 unsigned HOST_WIDE_INT low;
15812 HOST_WIDE_INT high;
15813 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15814 TREE_INT_CST_HIGH (arg0),
15815 &low, &high);
15816 t = force_fit_type_double (type, low, high, -1,
15817 overflow | TREE_OVERFLOW (arg0));
15819 break;
15821 case REAL_CST:
15822 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15823 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15824 else
15825 t = arg0;
15826 break;
15828 default:
15829 gcc_unreachable ();
15832 return t;
15835 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15836 constant. TYPE is the type of the result. */
15838 static tree
15839 fold_not_const (tree arg0, tree type)
15841 tree t = NULL_TREE;
15843 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15845 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15846 ~TREE_INT_CST_HIGH (arg0), 0,
15847 TREE_OVERFLOW (arg0));
15849 return t;
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
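/* Illustrative sketch, not part of the original fold-const.c:
   comparing the INTEGER_CSTs 2 and 3 with LT_EXPR folds at compile
   time to boolean true via constant_boolean_node.  The helper name is
   an assumption; fold_relational_const is static, so the sketch would
   have to live in this file.  */

static tree ATTRIBUTE_UNUSED
example_fold_relational (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* 2 < 3, so this returns constant_boolean_node (1, boolean_type_node).  */
  return fold_relational_const (LT_EXPR, boolean_type_node, two, three);
}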
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the MODIFY_EXPR inside the
     return, has side effects.  If either of them does not, we need not
     wrap the expression in a cleanup point expression.  Note that we
     don't check the left-hand side of the MODIFY_EXPR because it should
     always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
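/* Illustrative sketch, not part of the original fold-const.c: a
   constant has no side effects, so it comes back unchanged rather
   than wrapped in a CLEANUP_POINT_EXPR.  The helper name is an
   assumption for illustration only.  */

static tree ATTRIBUTE_UNUSED
example_cleanup_point (void)
{
  tree expr = build_int_cst (integer_type_node, 42);
  /* Returns EXPR itself; no wrapper is built.  */
  return fold_build_cleanup_point_expr (integer_type_node, expr);
}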
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
          SET_EXPR_LOCATION (op0, loc);
          return op0;
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          /* The element must lie within the vector; note the strict
             inequality, since an element index equal to the number of
             subparts would read past the end.  */
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3_loc (loc,
                                    BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
                                    part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1_loc (loc, IMAGPART_EXPR, type,
                                    TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}
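/* Illustrative sketch, not part of the original fold-const.c: folding
   *&v simplifies back to the decl V itself instead of building an
   INDIRECT_REF around the ADDR_EXPR.  The helper name and the decl
   "v" are assumptions for illustration only.  */

static tree ATTRIBUTE_UNUSED
example_fold_indirect_ref (void)
{
  tree v = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                       get_identifier ("v"), integer_type_node);
  tree addr = build_fold_addr_expr_loc (UNKNOWN_LOCATION, v);
  /* fold_indirect_ref_1 recognizes the *&p pattern and returns V.  */
  return build_fold_indirect_ref_loc (UNKNOWN_LOCATION, addr);
}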
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
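/* Illustrative sketch, not part of the original fold-const.c: an
   addition of a variable and a constant has no side effects, so when
   its result is ignored the whole expression folds away to
   integer_zero_node.  The helper name and the decl "x" are
   assumptions for illustration only.  */

static tree ATTRIBUTE_UNUSED
example_fold_ignored_result (void)
{
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                       get_identifier ("x"), integer_type_node);
  tree sum = fold_build2 (PLUS_EXPR, integer_type_node, x,
                          integer_one_node);
  /* No side effects, so this returns integer_zero_node.  */
  return fold_ignored_result (sum);
}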
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this test when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply performing the rounding below.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
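/* Illustrative sketch, not part of the original fold-const.c: rounding
   the sizetype constant 10 up to a multiple of 8 yields 16.  Because 8
   is a power of two, this takes the constant bit-masking path above:
   (10 & ~7) + 8 == 16.  round_down_loc behaves analogously, masking
   down to 8 instead.  The helper name is an assumption.  */

static tree ATTRIBUTE_UNUSED
example_round_up (void)
{
  tree ten = size_int (10);
  return round_up_loc (UNKNOWN_LOCATION, ten, 8);
}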
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this test when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply performing the rounding below.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Return a pointer to the base of the object addressed by EXP and
   extract information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
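/* Illustrative sketch, not part of the original fold-const.c: the
   addresses &arr[1] and &arr[0] share the same core (&arr) and differ
   only in their constant bit positions, so their difference folds to
   sizeof (int) bytes.  The helper name and the decl "arr" are
   assumptions for illustration only.  */

static bool ATTRIBUTE_UNUSED
example_ptr_difference (HOST_WIDE_INT *diff)
{
  tree index_type = build_index_type (size_int (3));
  tree arr_type = build_array_type (integer_type_node, index_type);
  tree arr = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                         get_identifier ("arr"), arr_type);
  tree ref0 = build4 (ARRAY_REF, integer_type_node, arr,
                      size_int (0), NULL_TREE, NULL_TREE);
  tree ref1 = build4 (ARRAY_REF, integer_type_node, arr,
                      size_int (1), NULL_TREE, NULL_TREE);
  tree a0 = build_fold_addr_expr_loc (UNKNOWN_LOCATION, ref0);
  tree a1 = build_fold_addr_expr_loc (UNKNOWN_LOCATION, ref1);

  /* On success *DIFF holds &arr[1] - &arr[0] in bytes.  */
  return ptr_difference_const (a1, a0, diff);
}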
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign function call and return the first
               argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
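/* Illustrative sketch, not part of the original fold-const.c: when the
   sign of the result is irrelevant, -d strips to plain d via the
   NEGATE_EXPR case above.  The helper name and the decl "d" are
   assumptions for illustration only.  */

static tree ATTRIBUTE_UNUSED
example_strip_sign_ops (void)
{
  tree d = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                       get_identifier ("d"), double_type_node);
  tree neg = fold_build1 (NEGATE_EXPR, double_type_node, d);
  /* Returns D, with the sign-only negation removed.  */
  return fold_strip_sign_ops (neg);
}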