/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
			  tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
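
/* Illustrative example (not from the original sources): with 8-bit two's
   complement values, 100 + 100 wraps to -56 (0xC8).  Here a ^ b == 0, so
   ~(a ^ b) has the sign bit set, and a ^ sum == 0x64 ^ 0xC8 == 0xAC also
   has the sign bit set; their AND is negative, so
   OVERFLOW_SUM_SIGN (100, 100, -56) is nonzero.  When the operands have
   opposite signs, ~(a ^ b) clears the sign bit and no overflow is
   reported, which matches the fact that such an addition cannot
   overflow.  */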
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
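
/* Illustrative example (assuming a 32-bit HOST_WIDE_INT): BASE is
   0x10000, and for x == 0x12345678 we get LOWPART (x) == 0x5678 and
   HIGHPART (x) == 0x1234, so x == LOWPART (x) + HIGHPART (x) * BASE.  */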
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
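
/* Illustrative round trip (assuming a 32-bit HOST_WIDE_INT): encoding
   low == 0x89ABCDEF, hi == 0x01234567 yields
   words == { 0xCDEF, 0x89AB, 0x4567, 0x0123 }, and decode reassembles
   the same two pieces, since each half-word is non-negative and smaller
   than BASE.  */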
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec = TYPE_PRECISION (type);
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
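
/* Illustrative example (for an 8-bit signed type, whatever the host word
   size): the input pair (0xFF, 0) is first masked to the low 8 bits and
   then sign extended, giving the pair (-1, -1), i.e. the value -1.
   Because the result differs from the argument, fit_double_type reports
   overflow.  */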
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   the value to be within range of the type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
   OVERFLOWED is nonzero,
   or OVERFLOWABLE is >0 and signed overflow occurs,
   or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
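
/* Illustrative use (a sketch, not code from this file): folding the
   signed char expression 127 + 1 produces the raw pair (128, 0); calling
   force_fit_type_double with OVERFLOWABLE == 1 on that sign-extended
   type wraps the value to -128 and, because signed overflow occurred,
   returns an unshared INTEGER_CST with TREE_OVERFLOW set for the caller
   to inspect.  */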
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
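
/* Illustrative example: adding the low words ~(unsigned HOST_WIDE_INT) 0
   and 1 wraps L to zero, so the carry term (l < l1) is 1 and propagates
   into the high word.  In the unsigned case, overflow shows up as the
   high word of the sum comparing below the first operand's high word.  */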
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
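
/* Illustrative note: the only signed double-word value whose negation
   overflows is the most negative one, L1 == 0 with only the sign bit of
   H1 set.  Then -H1 == H1, so (*hv & h1) keeps the sign bit, the test
   is negative, and the function returns nonzero; for every other input
   it returns 0.  */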
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
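
/* Illustrative sketch of the scheme above: with a 32-bit HOST_WIDE_INT,
   each operand becomes four 16-bit digits and the schoolbook loop forms
   an eight-digit product, exactly as one would multiply two 4-digit
   numbers in base 0x10000.  Multiplying 0x10000 by 0x10000, for
   instance, fills only the low double word (2^32) and leaves the top
   half zero, so no unsigned overflow is reported.  */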
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
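
/* Illustrative note: the double shift
   `l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1' moves the high bits
   of L1 into the low bits of the new high word while staying defined
   when COUNT is 0; a single shift by the full word width
   HOST_BITS_PER_WIDE_INT would be undefined behavior in C.  */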
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
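
/* Illustrative note: each rotation is composed from two logical shifts
   whose results are ORed together; rotating left by COUNT within PREC
   bits is the left shift by COUNT combined with the right shift by
   PREC - COUNT.  For example, rotating the 8-bit value 0x81 left by 1
   with prec == 8 gives (0x02 | 0x01) == 0x03.  */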
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order divisor digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den <= ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
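
/* Illustrative example of the rounding modes (not from the original
   sources): dividing -7 by 2, TRUNC_DIV_EXPR yields quotient -3 with
   remainder -1, while FLOOR_DIV_EXPR sees a negative quotient with a
   nonzero remainder and adjusts to quotient -4, remainder 1;
   CEIL_DIV_EXPR of 7 by 2 likewise steps 3 up to 4.  ROUND_DIV_EXPR
   compares twice the remainder against the divisor to decide whether to
   move the quotient to the nearest integer.  */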
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  int uns;

  /* The signedness of the division is taken from operand two, which
     does the right thing for POINTER_PLUS_EXPR where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
}
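
/* Illustrative use: for integer constants, dividing 12 by 4 with
   EXACT_DIV_EXPR returns the constant 3, while dividing 13 by 4 leaves
   a remainder and therefore returns NULL_TREE, letting the caller fall
   back to some other simplification.  */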
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
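
/* Illustrative calling pattern for the deferral machinery (a sketch;
   the variables `folded', `op0', `op1', `used_p' and `stmt' are
   hypothetical):

     fold_defer_overflow_warnings ();
     folded = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (folded != NULL_TREE && used_p,
				     stmt, 0);

   Any strict-overflow warning triggered while folding is buffered and
   only emitted, at the statement's location, if the caller decides the
   folded result is actually used.  */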
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
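
/* Illustrative example: for signed char, the only constant this rejects
   is -128, whose 8-bit pattern is exactly 1 << 7; negating it would
   yield +128, which does not fit the type, whereas every value in
   -127 ... 127 negates safely.  */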
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
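
/* Illustrative example: splitting IN == x - 5 with CODE == PLUS_EXPR
   returns the variable part x, leaves *CONP null, and, because the
   literal 5 was subtracted, stores it in *MINUS_LITP rather than *LITP.
   The caller can then recombine the pieces with associate_trees.  */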
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
			  fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
		    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
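
/* Illustrative use: int_const_binop with PLUS_EXPR on the int constants
   2 and 3 yields the shared constant 5; on INT_MAX and 1 the double-word
   sum exceeds the type's range, so force_fit_type_double wraps the value
   and returns an unshared node with TREE_OVERFLOW set.  */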
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
1947 if (TREE_CODE (arg1) == COMPLEX_CST)
1949 tree type = TREE_TYPE (arg1);
1950 tree r1 = TREE_REALPART (arg1);
1951 tree i1 = TREE_IMAGPART (arg1);
1952 tree r2 = TREE_REALPART (arg2);
1953 tree i2 = TREE_IMAGPART (arg2);
1954 tree real, imag;
1956 switch (code)
1958 case PLUS_EXPR:
1959 case MINUS_EXPR:
1960 real = const_binop (code, r1, r2, notrunc);
1961 imag = const_binop (code, i1, i2, notrunc);
1962 break;
1964 case MULT_EXPR:
1965 if (COMPLEX_FLOAT_TYPE_P (type))
1966 return do_mpc_arg2 (arg1, arg2, type,
1967 /* do_nonfinite= */ folding_initializer,
1968 mpc_mul);
1970 real = const_binop (MINUS_EXPR,
1971 const_binop (MULT_EXPR, r1, r2, notrunc),
1972 const_binop (MULT_EXPR, i1, i2, notrunc),
1973 notrunc);
1974 imag = const_binop (PLUS_EXPR,
1975 const_binop (MULT_EXPR, r1, i2, notrunc),
1976 const_binop (MULT_EXPR, i1, r2, notrunc),
1977 notrunc);
1978 break;
1980 case RDIV_EXPR:
1981 if (COMPLEX_FLOAT_TYPE_P (type))
1982 return do_mpc_arg2 (arg1, arg2, type,
1983 /* do_nonfinite= */ folding_initializer,
1984 mpc_div);
1985 /* Fallthru ... */
1986 case TRUNC_DIV_EXPR:
1987 case CEIL_DIV_EXPR:
1988 case FLOOR_DIV_EXPR:
1989 case ROUND_DIV_EXPR:
1990 if (flag_complex_method == 0)
1992 /* Keep this algorithm in sync with
1993    tree-complex.c:expand_complex_div_straight().
1995    Expand complex division to scalars, straightforward algorithm.
1996    a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1997    with t = br*br + bi*bi.  */
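/* For instance, (3 + 4i) / (1 + 2i) gives t = 1*1 + 2*2 = 5 with a
   real numerator of 3*1 + 4*2 = 11 and an imaginary numerator of
   4*1 - 3*2 = -2; for an integral complex type under TRUNC_DIV_EXPR
   this folds to 11/5 = 2 and -2/5 = 0, i.e. 2 + 0i.  */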
1999 tree magsquared
2000 = const_binop (PLUS_EXPR,
2001 const_binop (MULT_EXPR, r2, r2, notrunc),
2002 const_binop (MULT_EXPR, i2, i2, notrunc),
2003 notrunc);
2004 tree t1
2005 = const_binop (PLUS_EXPR,
2006 const_binop (MULT_EXPR, r1, r2, notrunc),
2007 const_binop (MULT_EXPR, i1, i2, notrunc),
2008 notrunc);
2009 tree t2
2010 = const_binop (MINUS_EXPR,
2011 const_binop (MULT_EXPR, i1, r2, notrunc),
2012 const_binop (MULT_EXPR, r1, i2, notrunc),
2013 notrunc);
2015 real = const_binop (code, t1, magsquared, notrunc);
2016 imag = const_binop (code, t2, magsquared, notrunc);
2018 else
2020 /* Keep this algorithm in sync with
2021 tree-complex.c:expand_complex_div_wide().
2023 Expand complex division to scalars, modified algorithm to minimize
2024 overflow with wide input ranges. */
2025 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
2026 fold_abs_const (r2, TREE_TYPE (type)),
2027 fold_abs_const (i2, TREE_TYPE (type)));
2029 if (integer_nonzerop (compare))
2031 /* In the TRUE branch, we compute
2032 ratio = br/bi;
2033 div = (br * ratio) + bi;
2034 tr = (ar * ratio) + ai;
2035 ti = (ai * ratio) - ar;
2036 tr = tr / div;
2037 ti = ti / div; */
2038 tree ratio = const_binop (code, r2, i2, notrunc);
2039 tree div = const_binop (PLUS_EXPR, i2,
2040 const_binop (MULT_EXPR, r2, ratio,
2041 notrunc),
2042 notrunc);
2043 real = const_binop (MULT_EXPR, r1, ratio, notrunc);
2044 real = const_binop (PLUS_EXPR, real, i1, notrunc);
2045 real = const_binop (code, real, div, notrunc);
2047 imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
2048 imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
2049 imag = const_binop (code, imag, div, notrunc);
2051 else
2053 /* In the FALSE branch, we compute
2054    ratio = bi/br;
2055    div = (bi * ratio) + br;
2056    tr = (ai * ratio) + ar;
2057    ti = ai - (ar * ratio);
2058    tr = tr / div;
2059    ti = ti / div;  */
2060 tree ratio = const_binop (code, i2, r2, notrunc);
2061 tree div = const_binop (PLUS_EXPR, r2,
2062 const_binop (MULT_EXPR, i2, ratio,
2063 notrunc),
2064 notrunc);
2066 real = const_binop (MULT_EXPR, i1, ratio, notrunc);
2067 real = const_binop (PLUS_EXPR, real, r1, notrunc);
2068 real = const_binop (code, real, div, notrunc);
2070 imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
2071 imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
2072 imag = const_binop (code, imag, div, notrunc);
2075 break;
2077 default:
2078 return NULL_TREE;
2081 if (real && imag)
2082 return build_complex (type, real, imag);
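/* The wide-range variant above is essentially Smith's (1962) algorithm:
   scaling by ratio = br/bi or bi/br (whichever keeps |ratio| <= 1)
   avoids squaring large components.  E.g. with a divisor of
   1e300 + 1e300i in double, br*br + bi*bi would overflow to +Inf,
   whereas ratio = 1 and div = 2e300 stay representable.  */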
2085 if (TREE_CODE (arg1) == VECTOR_CST)
2087 tree type = TREE_TYPE (arg1);
2088 int count = TYPE_VECTOR_SUBPARTS (type), i;
2089 tree elements1, elements2, list = NULL_TREE;
2091 if (TREE_CODE (arg2) != VECTOR_CST)
2092 return NULL_TREE;
2094 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2095 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2097 for (i = 0; i < count; i++)
2099 tree elem1, elem2, elem;
2101 /* The trailing elements can be empty and should be treated as 0.  */
2102 if (!elements1)
2103   elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2104 else
2105   {
2106     elem1 = TREE_VALUE (elements1);
2107     elements1 = TREE_CHAIN (elements1);
2108   }
2110 if (!elements2)
2111   elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2112 else
2113   {
2114     elem2 = TREE_VALUE (elements2);
2115     elements2 = TREE_CHAIN (elements2);
2116   }
2118 elem = const_binop (code, elem1, elem2, notrunc);
2120 /* It is possible that const_binop cannot handle the given
2121    code and returns NULL_TREE.  */
2122 if (elem == NULL_TREE)
2123 return NULL_TREE;
2125 list = tree_cons (NULL_TREE, elem, list);
2127 return build_vector (type, nreverse (list));
2129 return NULL_TREE;
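/* As an example of the VECTOR_CST case above, adding the V4SI
   constants {1, 2, 3, 4} and {10, 20, 30, 40} folds element-wise to
   {11, 22, 33, 44}; if any single element fails to fold, the whole
   vector fold is abandoned.  */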
2132 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2133 indicates which particular sizetype to create. */
2135 tree
2136 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2138 return build_int_cst (sizetype_tab[(int) kind], number);
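/* Callers normally reach this through the tree.h wrappers; e.g.
   size_int (N) is size_int_kind (N, SIZETYPE) and bitsize_int (N)
   is size_int_kind (N, BITSIZETYPE).  */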
2141 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2142 is a tree code. The type of the result is taken from the operands.
2143 Both must be equivalent integer types, ala int_binop_types_match_p.
2144 If the operands are constant, so is the result. */
2146 tree
2147 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2149 tree type = TREE_TYPE (arg0);
2151 if (arg0 == error_mark_node || arg1 == error_mark_node)
2152 return error_mark_node;
2154 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2155 TREE_TYPE (arg1)));
2157 /* Handle the special case of two integer constants faster. */
2158 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2160 /* And some specific cases even faster than that. */
2161 if (code == PLUS_EXPR)
2163 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2164 return arg1;
2165 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2166 return arg0;
2168 else if (code == MINUS_EXPR)
2170 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2171 return arg0;
2173 else if (code == MULT_EXPR)
2175 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2176 return arg1;
2179 /* Handle general case of two integer constants. */
2180 return int_const_binop (code, arg0, arg1, 0);
2183 return fold_build2_loc (loc, code, type, arg0, arg1);
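/* For instance, with T of sizetype, size_binop (PLUS_EXPR,
   size_int (0), t) and size_binop (MULT_EXPR, size_int (1), t) both
   hand back T itself through the fast paths above; two genuine
   constants fall through to int_const_binop.  */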
2186 /* Given two values, either both of sizetype or both of bitsizetype,
2187 compute the difference between the two values. Return the value
2188 in a signed type corresponding to the type of the operands.  */
2190 tree
2191 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2193 tree type = TREE_TYPE (arg0);
2194 tree ctype;
2196 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2197 TREE_TYPE (arg1)));
2199 /* If the type is already signed, just do the simple thing. */
2200 if (!TYPE_UNSIGNED (type))
2201 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2203 if (type == sizetype)
2204 ctype = ssizetype;
2205 else if (type == bitsizetype)
2206 ctype = sbitsizetype;
2207 else
2208 ctype = signed_type_for (type);
2210 /* If either operand is not a constant, do the conversions to the signed
2211 type and subtract. The hardware will do the right thing with any
2212 overflow in the subtraction. */
2213 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2214 return size_binop_loc (loc, MINUS_EXPR,
2215 fold_convert_loc (loc, ctype, arg0),
2216 fold_convert_loc (loc, ctype, arg1));
2218 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2219 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2220 overflow) and negate (which can't either). Special-case a result
2221 of zero while we're here. */
2222 if (tree_int_cst_equal (arg0, arg1))
2223 return build_int_cst (ctype, 0);
2224 else if (tree_int_cst_lt (arg1, arg0))
2225 return fold_convert_loc (loc, ctype,
2226 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2227 else
2228 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2229 fold_convert_loc (loc, ctype,
2230 size_binop_loc (loc,
2231 MINUS_EXPR,
2232 arg1, arg0)));
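/* E.g. for the sizetype constants 2 and 5 the difference is computed
   as -(5 - 2) and yields -3 in ssizetype, rather than the huge
   positive value that 2 - 5 would wrap to in unsigned sizetype.  */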
2235 /* A subroutine of fold_convert_const handling conversions of an
2236 INTEGER_CST to another integer type. */
2238 static tree
2239 fold_convert_const_int_from_int (tree type, const_tree arg1)
2241 tree t;
2243 /* Given an integer constant, make new constant with new type,
2244 appropriately sign-extended or truncated. */
2245 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2246 TREE_INT_CST_HIGH (arg1),
2247 /* Don't set the overflow when
2248 converting from a pointer, */
2249 !POINTER_TYPE_P (TREE_TYPE (arg1))
2250 /* or to a sizetype with the same signedness
2251    and unchanged precision.
2252 ??? sizetype is always sign-extended,
2253 but its signedness depends on the
2254 frontend. Thus we see spurious overflows
2255 here if we do not check this. */
2256 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2257 == TYPE_PRECISION (type))
2258 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2259 == TYPE_UNSIGNED (type))
2260 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2261 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2262 || (TREE_CODE (type) == INTEGER_TYPE
2263 && TYPE_IS_SIZETYPE (type)))),
2264 (TREE_INT_CST_HIGH (arg1) < 0
2265 && (TYPE_UNSIGNED (type)
2266 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2267 | TREE_OVERFLOW (arg1));
2269 return t;
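/* For example, converting the int constant 300 to unsigned char
   truncates modulo 256 and folds to 44, while converting -1 to a
   32-bit unsigned type folds to 4294967295.  */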
2272 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2273 to an integer type. */
2275 static tree
2276 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2278 int overflow = 0;
2279 tree t;
2281 /* The following code implements the floating point to integer
2282 conversion rules required by the Java Language Specification:
2283 IEEE NaNs are mapped to zero and values that overflow
2284 the target precision saturate, i.e. values greater than
2285 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2286 are mapped to INT_MIN. These semantics are allowed by the
2287 C and C++ standards that simply state that the behavior of
2288 FP-to-integer conversion is unspecified upon overflow. */
2290 HOST_WIDE_INT high, low;
2291 REAL_VALUE_TYPE r;
2292 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2294 switch (code)
2296 case FIX_TRUNC_EXPR:
2297 real_trunc (&r, VOIDmode, &x);
2298 break;
2300 default:
2301 gcc_unreachable ();
2304 /* If R is NaN, return zero and show we have an overflow. */
2305 if (REAL_VALUE_ISNAN (r))
2307 overflow = 1;
2308 high = 0;
2309 low = 0;
2312 /* See if R is less than the lower bound or greater than the
2313 upper bound. */
2315 if (! overflow)
2317 tree lt = TYPE_MIN_VALUE (type);
2318 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2319 if (REAL_VALUES_LESS (r, l))
2321 overflow = 1;
2322 high = TREE_INT_CST_HIGH (lt);
2323 low = TREE_INT_CST_LOW (lt);
2327 if (! overflow)
2329 tree ut = TYPE_MAX_VALUE (type);
2330 if (ut)
2332 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2333 if (REAL_VALUES_LESS (u, r))
2335 overflow = 1;
2336 high = TREE_INT_CST_HIGH (ut);
2337 low = TREE_INT_CST_LOW (ut);
2342 if (! overflow)
2343 REAL_VALUE_TO_INT (&low, &high, r);
2345 t = force_fit_type_double (type, low, high, -1,
2346 overflow | TREE_OVERFLOW (arg1));
2347 return t;
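/* So (int) 3.9 folds to 3 and (int) -3.9 folds to -3 via real_trunc;
   a NaN operand folds to 0, and (int) 1e30 saturates to INT_MAX
   (2147483647 for a 32-bit int), with TREE_OVERFLOW set on the
   result in both of the latter cases.  */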
2350 /* A subroutine of fold_convert_const handling conversions of a
2351 FIXED_CST to an integer type. */
2353 static tree
2354 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2356 tree t;
2357 double_int temp, temp_trunc;
2358 unsigned int mode;
2360 /* Right shift FIXED_CST to temp by fbit. */
2361 temp = TREE_FIXED_CST (arg1).data;
2362 mode = TREE_FIXED_CST (arg1).mode;
2363 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2365 lshift_double (temp.low, temp.high,
2366 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2367 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2369 /* Left shift temp to temp_trunc by fbit. */
2370 lshift_double (temp.low, temp.high,
2371 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2372 &temp_trunc.low, &temp_trunc.high,
2373 SIGNED_FIXED_POINT_MODE_P (mode));
2375 else
2377 temp.low = 0;
2378 temp.high = 0;
2379 temp_trunc.low = 0;
2380 temp_trunc.high = 0;
2383 /* If FIXED_CST is negative, we need to round the value toward 0.
2384 We do this by adding 1 to temp when the fractional bits are nonzero.  */
2385 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2386 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2388 double_int one;
2389 one.low = 1;
2390 one.high = 0;
2391 temp = double_int_add (temp, one);
2394 /* Given a fixed-point constant, make new constant with new type,
2395 appropriately sign-extended or truncated. */
2396 t = force_fit_type_double (type, temp.low, temp.high, -1,
2397 (temp.high < 0
2398 && (TYPE_UNSIGNED (type)
2399 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2400 | TREE_OVERFLOW (arg1));
2402 return t;
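/* E.g. the signed fixed-point constant -2.5 converts to the integer
   -2: the arithmetic shift by fbit rounds down to -3, and the nonzero
   fractional bits then add 1 back, implementing round-toward-zero.  */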
2405 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2406 to another floating point type. */
2408 static tree
2409 fold_convert_const_real_from_real (tree type, const_tree arg1)
2411 REAL_VALUE_TYPE value;
2412 tree t;
2414 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2415 t = build_real (type, value);
2417 /* If converting an infinity or NAN to a representation that doesn't
2418 have one, set the overflow bit so that we can produce some kind of
2419 error message at the appropriate point if necessary. It's not the
2420 most user-friendly message, but it's better than nothing. */
2421 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2422 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2423 TREE_OVERFLOW (t) = 1;
2424 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2425 && !MODE_HAS_NANS (TYPE_MODE (type)))
2426 TREE_OVERFLOW (t) = 1;
2427 /* Regular overflow: the conversion produced an infinity in a mode
2428    that can't represent infinities.  */
2429 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2430 && REAL_VALUE_ISINF (value)
2431 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2432 TREE_OVERFLOW (t) = 1;
2433 else
2434 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2435 return t;
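/* E.g. narrowing the double constant 1e200 to float folds to +Inf
   with no overflow recorded, since SFmode has infinities; only modes
   lacking infinities or NaNs take the overflow paths above.  */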
2438 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2439 to a floating point type. */
2441 static tree
2442 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2444 REAL_VALUE_TYPE value;
2445 tree t;
2447 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2448 t = build_real (type, value);
2450 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2451 return t;
2454 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2455 to another fixed-point type. */
2457 static tree
2458 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2460 FIXED_VALUE_TYPE value;
2461 tree t;
2462 bool overflow_p;
2464 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2465 TYPE_SATURATING (type));
2466 t = build_fixed (type, value);
2468 /* Propagate overflow flags. */
2469 if (overflow_p | TREE_OVERFLOW (arg1))
2470 TREE_OVERFLOW (t) = 1;
2471 return t;
2474 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2475 to a fixed-point type. */
2477 static tree
2478 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2480 FIXED_VALUE_TYPE value;
2481 tree t;
2482 bool overflow_p;
2484 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2485 TREE_INT_CST (arg1),
2486 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2487 TYPE_SATURATING (type));
2488 t = build_fixed (type, value);
2490 /* Propagate overflow flags. */
2491 if (overflow_p | TREE_OVERFLOW (arg1))
2492 TREE_OVERFLOW (t) = 1;
2493 return t;
2496 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2497 to a fixed-point type. */
2499 static tree
2500 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2502 FIXED_VALUE_TYPE value;
2503 tree t;
2504 bool overflow_p;
2506 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2507 &TREE_REAL_CST (arg1),
2508 TYPE_SATURATING (type));
2509 t = build_fixed (type, value);
2511 /* Propagate overflow flags. */
2512 if (overflow_p | TREE_OVERFLOW (arg1))
2513 TREE_OVERFLOW (t) = 1;
2514 return t;
2517 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2518 type TYPE. If no simplification can be done return NULL_TREE. */
2520 static tree
2521 fold_convert_const (enum tree_code code, tree type, tree arg1)
2523 if (TREE_TYPE (arg1) == type)
2524 return arg1;
2526 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2527 || TREE_CODE (type) == OFFSET_TYPE)
2529 if (TREE_CODE (arg1) == INTEGER_CST)
2530 return fold_convert_const_int_from_int (type, arg1);
2531 else if (TREE_CODE (arg1) == REAL_CST)
2532 return fold_convert_const_int_from_real (code, type, arg1);
2533 else if (TREE_CODE (arg1) == FIXED_CST)
2534 return fold_convert_const_int_from_fixed (type, arg1);
2536 else if (TREE_CODE (type) == REAL_TYPE)
2538 if (TREE_CODE (arg1) == INTEGER_CST)
2539 return build_real_from_int_cst (type, arg1);
2540 else if (TREE_CODE (arg1) == REAL_CST)
2541 return fold_convert_const_real_from_real (type, arg1);
2542 else if (TREE_CODE (arg1) == FIXED_CST)
2543 return fold_convert_const_real_from_fixed (type, arg1);
2545 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2547 if (TREE_CODE (arg1) == FIXED_CST)
2548 return fold_convert_const_fixed_from_fixed (type, arg1);
2549 else if (TREE_CODE (arg1) == INTEGER_CST)
2550 return fold_convert_const_fixed_from_int (type, arg1);
2551 else if (TREE_CODE (arg1) == REAL_CST)
2552 return fold_convert_const_fixed_from_real (type, arg1);
2554 return NULL_TREE;
2557 /* Construct a vector of zero elements of vector type TYPE. */
2559 static tree
2560 build_zero_vector (tree type)
2562 tree elem, list;
2563 int i, units;
2565 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2566 units = TYPE_VECTOR_SUBPARTS (type);
2568 list = NULL_TREE;
2569 for (i = 0; i < units; i++)
2570 list = tree_cons (NULL_TREE, elem, list);
2571 return build_vector (type, list);
2574 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2576 bool
2577 fold_convertible_p (const_tree type, const_tree arg)
2579 tree orig = TREE_TYPE (arg);
2581 if (type == orig)
2582 return true;
2584 if (TREE_CODE (arg) == ERROR_MARK
2585 || TREE_CODE (type) == ERROR_MARK
2586 || TREE_CODE (orig) == ERROR_MARK)
2587 return false;
2589 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2590 return true;
2592 switch (TREE_CODE (type))
2594 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2595 case POINTER_TYPE: case REFERENCE_TYPE:
2596 case OFFSET_TYPE:
2597 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2598 || TREE_CODE (orig) == OFFSET_TYPE)
2599 return true;
2600 return (TREE_CODE (orig) == VECTOR_TYPE
2601 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2603 case REAL_TYPE:
2604 case FIXED_POINT_TYPE:
2605 case COMPLEX_TYPE:
2606 case VECTOR_TYPE:
2607 case VOID_TYPE:
2608 return TREE_CODE (type) == TREE_CODE (orig);
2610 default:
2611 return false;
2615 /* Convert expression ARG to type TYPE. Used by the middle-end for
2616 simple conversions in preference to calling the front-end's convert. */
2618 tree
2619 fold_convert_loc (location_t loc, tree type, tree arg)
2621 tree orig = TREE_TYPE (arg);
2622 tree tem;
2624 if (type == orig)
2625 return arg;
2627 if (TREE_CODE (arg) == ERROR_MARK
2628 || TREE_CODE (type) == ERROR_MARK
2629 || TREE_CODE (orig) == ERROR_MARK)
2630 return error_mark_node;
2632 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2633 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2635 switch (TREE_CODE (type))
2637 case POINTER_TYPE:
2638 case REFERENCE_TYPE:
2639 /* Handle conversions between pointers to different address spaces. */
2640 if (POINTER_TYPE_P (orig)
2641 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2642 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2643 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2644 /* fall through */
2646 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2647 case OFFSET_TYPE:
2648 if (TREE_CODE (arg) == INTEGER_CST)
2650 tem = fold_convert_const (NOP_EXPR, type, arg);
2651 if (tem != NULL_TREE)
2652 return tem;
2654 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2655 || TREE_CODE (orig) == OFFSET_TYPE)
2656 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2657 if (TREE_CODE (orig) == COMPLEX_TYPE)
2658 return fold_convert_loc (loc, type,
2659 fold_build1_loc (loc, REALPART_EXPR,
2660 TREE_TYPE (orig), arg));
2661 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2662 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2663 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2665 case REAL_TYPE:
2666 if (TREE_CODE (arg) == INTEGER_CST)
2668 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2669 if (tem != NULL_TREE)
2670 return tem;
2672 else if (TREE_CODE (arg) == REAL_CST)
2674 tem = fold_convert_const (NOP_EXPR, type, arg);
2675 if (tem != NULL_TREE)
2676 return tem;
2678 else if (TREE_CODE (arg) == FIXED_CST)
2680 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2681 if (tem != NULL_TREE)
2682 return tem;
2685 switch (TREE_CODE (orig))
2687 case INTEGER_TYPE:
2688 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2689 case POINTER_TYPE: case REFERENCE_TYPE:
2690 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2692 case REAL_TYPE:
2693 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2695 case FIXED_POINT_TYPE:
2696 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2698 case COMPLEX_TYPE:
2699 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2700 return fold_convert_loc (loc, type, tem);
2702 default:
2703 gcc_unreachable ();
2706 case FIXED_POINT_TYPE:
2707 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2708 || TREE_CODE (arg) == REAL_CST)
2710 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2711 if (tem != NULL_TREE)
2712 goto fold_convert_exit;
2715 switch (TREE_CODE (orig))
2717 case FIXED_POINT_TYPE:
2718 case INTEGER_TYPE:
2719 case ENUMERAL_TYPE:
2720 case BOOLEAN_TYPE:
2721 case REAL_TYPE:
2722 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2724 case COMPLEX_TYPE:
2725 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2726 return fold_convert_loc (loc, type, tem);
2728 default:
2729 gcc_unreachable ();
2732 case COMPLEX_TYPE:
2733 switch (TREE_CODE (orig))
2735 case INTEGER_TYPE:
2736 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2737 case POINTER_TYPE: case REFERENCE_TYPE:
2738 case REAL_TYPE:
2739 case FIXED_POINT_TYPE:
2740 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2741 fold_convert_loc (loc, TREE_TYPE (type), arg),
2742 fold_convert_loc (loc, TREE_TYPE (type),
2743 integer_zero_node));
2744 case COMPLEX_TYPE:
2746 tree rpart, ipart;
2748 if (TREE_CODE (arg) == COMPLEX_EXPR)
2750 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2751 TREE_OPERAND (arg, 0));
2752 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2753 TREE_OPERAND (arg, 1));
2754 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2757 arg = save_expr (arg);
2758 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2759 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2760 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2761 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2762 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2765 default:
2766 gcc_unreachable ();
2769 case VECTOR_TYPE:
2770 if (integer_zerop (arg))
2771 return build_zero_vector (type);
2772 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2773 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2774 || TREE_CODE (orig) == VECTOR_TYPE);
2775 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2777 case VOID_TYPE:
2778 tem = fold_ignored_result (arg);
2779 if (TREE_CODE (tem) == MODIFY_EXPR)
2780 goto fold_convert_exit;
2781 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2783 default:
2784 gcc_unreachable ();
2786 fold_convert_exit:
2787 protected_set_expr_location (tem, loc);
2788 return tem;
2791 /* Return false if expr can be assumed not to be an lvalue, true
2792 otherwise. */
2794 static bool
2795 maybe_lvalue_p (const_tree x)
2797 /* We only need to wrap lvalue tree codes. */
2798 switch (TREE_CODE (x))
2800 case VAR_DECL:
2801 case PARM_DECL:
2802 case RESULT_DECL:
2803 case LABEL_DECL:
2804 case FUNCTION_DECL:
2805 case SSA_NAME:
2807 case COMPONENT_REF:
2808 case INDIRECT_REF:
2809 case ALIGN_INDIRECT_REF:
2810 case MISALIGNED_INDIRECT_REF:
2811 case ARRAY_REF:
2812 case ARRAY_RANGE_REF:
2813 case BIT_FIELD_REF:
2814 case OBJ_TYPE_REF:
2816 case REALPART_EXPR:
2817 case IMAGPART_EXPR:
2818 case PREINCREMENT_EXPR:
2819 case PREDECREMENT_EXPR:
2820 case SAVE_EXPR:
2821 case TRY_CATCH_EXPR:
2822 case WITH_CLEANUP_EXPR:
2823 case COMPOUND_EXPR:
2824 case MODIFY_EXPR:
2825 case TARGET_EXPR:
2826 case COND_EXPR:
2827 case BIND_EXPR:
2828 break;
2830 default:
2831 /* Assume the worst for front-end tree codes. */
2832 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2833 break;
2834 return false;
2837 return true;
2840 /* Return an expr equal to X but certainly not valid as an lvalue. */
2842 tree
2843 non_lvalue_loc (location_t loc, tree x)
2845 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2846 us. */
2847 if (in_gimple_form)
2848 return x;
2850 if (! maybe_lvalue_p (x))
2851 return x;
2852 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2853 SET_EXPR_LOCATION (x, loc);
2854 return x;
2857 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2858 Zero means allow extended lvalues. */
2860 int pedantic_lvalues;
2862 /* When pedantic, return an expr equal to X but certainly not valid as a
2863 pedantic lvalue. Otherwise, return X. */
2865 static tree
2866 pedantic_non_lvalue_loc (location_t loc, tree x)
2868 if (pedantic_lvalues)
2869 return non_lvalue_loc (loc, x);
2870 protected_set_expr_location (x, loc);
2871 return x;
2874 /* Given a tree comparison code, return the code that is the logical inverse
2875 of the given code. It is not safe to do this for floating-point
2876 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2877 as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2879 enum tree_code
2880 invert_tree_comparison (enum tree_code code, bool honor_nans)
2882 if (honor_nans && flag_trapping_math)
2883 return ERROR_MARK;
2885 switch (code)
2887 case EQ_EXPR:
2888 return NE_EXPR;
2889 case NE_EXPR:
2890 return EQ_EXPR;
2891 case GT_EXPR:
2892 return honor_nans ? UNLE_EXPR : LE_EXPR;
2893 case GE_EXPR:
2894 return honor_nans ? UNLT_EXPR : LT_EXPR;
2895 case LT_EXPR:
2896 return honor_nans ? UNGE_EXPR : GE_EXPR;
2897 case LE_EXPR:
2898 return honor_nans ? UNGT_EXPR : GT_EXPR;
2899 case LTGT_EXPR:
2900 return UNEQ_EXPR;
2901 case UNEQ_EXPR:
2902 return LTGT_EXPR;
2903 case UNGT_EXPR:
2904 return LE_EXPR;
2905 case UNGE_EXPR:
2906 return LT_EXPR;
2907 case UNLT_EXPR:
2908 return GE_EXPR;
2909 case UNLE_EXPR:
2910 return GT_EXPR;
2911 case ORDERED_EXPR:
2912 return UNORDERED_EXPR;
2913 case UNORDERED_EXPR:
2914 return ORDERED_EXPR;
2915 default:
2916 gcc_unreachable ();
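/* E.g. when NaNs are honored, LT_EXPR inverts to UNGE_EXPR rather than
   GE_EXPR, because !(x < y) must also hold when x and y are unordered.
   With -ftrapping-math we give up instead: the unordered forms do not
   raise FE_INVALID on quiet NaNs, while the ordered ones do.  */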
2920 /* Similar, but return the comparison that results if the operands are
2921 swapped. This is safe for floating-point. */
2923 enum tree_code
2924 swap_tree_comparison (enum tree_code code)
2926 switch (code)
2928 case EQ_EXPR:
2929 case NE_EXPR:
2930 case ORDERED_EXPR:
2931 case UNORDERED_EXPR:
2932 case LTGT_EXPR:
2933 case UNEQ_EXPR:
2934 return code;
2935 case GT_EXPR:
2936 return LT_EXPR;
2937 case GE_EXPR:
2938 return LE_EXPR;
2939 case LT_EXPR:
2940 return GT_EXPR;
2941 case LE_EXPR:
2942 return GE_EXPR;
2943 case UNGT_EXPR:
2944 return UNLT_EXPR;
2945 case UNGE_EXPR:
2946 return UNLE_EXPR;
2947 case UNLT_EXPR:
2948 return UNGT_EXPR;
2949 case UNLE_EXPR:
2950 return UNGE_EXPR;
2951 default:
2952 gcc_unreachable ();
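/* E.g. x < y swaps to y > x.  Unlike inversion, this is always safe
   for floating point: the predicate is unchanged, so no unordered
   case is added or removed.  */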
2957 /* Convert a comparison tree code from an enum tree_code representation
2958 into a compcode bit-based encoding. This function is the inverse of
2959 compcode_to_comparison. */
2961 static enum comparison_code
2962 comparison_to_compcode (enum tree_code code)
2964 switch (code)
2966 case LT_EXPR:
2967 return COMPCODE_LT;
2968 case EQ_EXPR:
2969 return COMPCODE_EQ;
2970 case LE_EXPR:
2971 return COMPCODE_LE;
2972 case GT_EXPR:
2973 return COMPCODE_GT;
2974 case NE_EXPR:
2975 return COMPCODE_NE;
2976 case GE_EXPR:
2977 return COMPCODE_GE;
2978 case ORDERED_EXPR:
2979 return COMPCODE_ORD;
2980 case UNORDERED_EXPR:
2981 return COMPCODE_UNORD;
2982 case UNLT_EXPR:
2983 return COMPCODE_UNLT;
2984 case UNEQ_EXPR:
2985 return COMPCODE_UNEQ;
2986 case UNLE_EXPR:
2987 return COMPCODE_UNLE;
2988 case UNGT_EXPR:
2989 return COMPCODE_UNGT;
2990 case LTGT_EXPR:
2991 return COMPCODE_LTGT;
2992 case UNGE_EXPR:
2993 return COMPCODE_UNGE;
2994 default:
2995 gcc_unreachable ();
2999 /* Convert a compcode bit-based encoding of a comparison operator back
3000 to GCC's enum tree_code representation. This function is the
3001 inverse of comparison_to_compcode. */
3003 static enum tree_code
3004 compcode_to_comparison (enum comparison_code code)
3006 switch (code)
3008 case COMPCODE_LT:
3009 return LT_EXPR;
3010 case COMPCODE_EQ:
3011 return EQ_EXPR;
3012 case COMPCODE_LE:
3013 return LE_EXPR;
3014 case COMPCODE_GT:
3015 return GT_EXPR;
3016 case COMPCODE_NE:
3017 return NE_EXPR;
3018 case COMPCODE_GE:
3019 return GE_EXPR;
3020 case COMPCODE_ORD:
3021 return ORDERED_EXPR;
3022 case COMPCODE_UNORD:
3023 return UNORDERED_EXPR;
3024 case COMPCODE_UNLT:
3025 return UNLT_EXPR;
3026 case COMPCODE_UNEQ:
3027 return UNEQ_EXPR;
3028 case COMPCODE_UNLE:
3029 return UNLE_EXPR;
3030 case COMPCODE_UNGT:
3031 return UNGT_EXPR;
3032 case COMPCODE_LTGT:
3033 return LTGT_EXPR;
3034 case COMPCODE_UNGE:
3035 return UNGE_EXPR;
3036 default:
3037 gcc_unreachable ();
3041 /* Return a tree for the comparison which is the combination of
3042 doing the AND or OR (depending on CODE) of the two operations LCODE
3043 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3044 the possibility of trapping if the mode has NaNs, and return NULL_TREE
3045 if this makes the transformation invalid. */
3047 tree
3048 combine_comparisons (location_t loc,
3049 enum tree_code code, enum tree_code lcode,
3050 enum tree_code rcode, tree truth_type,
3051 tree ll_arg, tree lr_arg)
3053 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
3054 enum comparison_code lcompcode = comparison_to_compcode (lcode);
3055 enum comparison_code rcompcode = comparison_to_compcode (rcode);
3056 int compcode;
3058 switch (code)
3060 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3061 compcode = lcompcode & rcompcode;
3062 break;
3064 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3065 compcode = lcompcode | rcompcode;
3066 break;
3068 default:
3069 return NULL_TREE;
3072 if (!honor_nans)
3074 /* Eliminate unordered comparisons, as well as LTGT and ORD
3075 which are not used unless the mode has NaNs. */
3076 compcode &= ~COMPCODE_UNORD;
3077 if (compcode == COMPCODE_LTGT)
3078 compcode = COMPCODE_NE;
3079 else if (compcode == COMPCODE_ORD)
3080 compcode = COMPCODE_TRUE;
3082 else if (flag_trapping_math)
3084 /* Check that the original operation and the optimized ones will trap
3085 under the same condition. */
3086 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3087 && (lcompcode != COMPCODE_EQ)
3088 && (lcompcode != COMPCODE_ORD);
3089 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3090 && (rcompcode != COMPCODE_EQ)
3091 && (rcompcode != COMPCODE_ORD);
3092 bool trap = (compcode & COMPCODE_UNORD) == 0
3093 && (compcode != COMPCODE_EQ)
3094 && (compcode != COMPCODE_ORD);
3096 /* In a short-circuited boolean expression the LHS might be
3097 such that the RHS, if evaluated, will never trap. For
3098 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3099 if neither x nor y is NaN. (This is a mixed blessing: for
3100 example, the expression above will never trap, hence
3101 optimizing it to x < y would be invalid). */
3102 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3103 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3104 rtrap = false;
3106 /* If the comparison was short-circuited, and only the RHS
3107 trapped, we may now generate a spurious trap. */
3108 if (rtrap && !ltrap
3109 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3110 return NULL_TREE;
3112 /* If we changed the conditions that cause a trap, we lose. */
3113 if ((ltrap || rtrap) != trap)
3114 return NULL_TREE;
3117 if (compcode == COMPCODE_TRUE)
3118 return constant_boolean_node (true, truth_type);
3119 else if (compcode == COMPCODE_FALSE)
3120 return constant_boolean_node (false, truth_type);
3121 else
3123 enum tree_code tcode;
3125 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3126 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
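/* For example, x < y || x == y combines as COMPCODE_LT | COMPCODE_EQ
   = 1 | 2 = COMPCODE_LE and folds to x <= y, while x < y && x > y
   combines as 1 & 4 = COMPCODE_FALSE and folds to constant false.  */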
3130 /* Return nonzero if two operands (typically of the same tree node)
3131 are necessarily equal. If either argument has side-effects this
3132 function returns zero. FLAGS modifies behavior as follows:
3134 If OEP_ONLY_CONST is set, only return nonzero for constants.
3135 This function tests whether the operands are indistinguishable;
3136 it does not test whether they are equal using C's == operation.
3137 The distinction is important for IEEE floating point, because
3138 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3139 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3141 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3142 even though it may hold multiple values during a function.
3143 This is because a GCC tree node guarantees that nothing else is
3144 executed between the evaluation of its "operands" (which may often
3145 be evaluated in arbitrary order). Hence if the operands themselves
3146 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3147 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3148 unset means assuming isochronic (or instantaneous) tree equivalence.
3149 Unless comparing arbitrary expression trees, such as from different
3150 statements, this flag can usually be left unset.
3152 If OEP_PURE_SAME is set, then pure functions with identical arguments
3153 are considered the same. It is used when the caller has other ways
3154 to ensure that global memory is unchanged in between. */
3156 int
3157 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3159 /* If either is ERROR_MARK, they aren't equal. */
3160 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3161 return 0;
3163 /* Check equality of integer constants before bailing out due to
3164 precision differences. */
3165 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3166 return tree_int_cst_equal (arg0, arg1);
3168 /* If both types don't have the same signedness, then we can't consider
3169 them equal. We must check this before the STRIP_NOPS calls
3170 because they may change the signedness of the arguments. As pointers
3171 strictly don't have a signedness, require either two pointers or
3172 two non-pointers as well. */
3173 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3174 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3175 return 0;
3177 /* We cannot consider pointers to different address space equal. */
3178 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
3179 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3180 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3181 return 0;
3183 /* If both types don't have the same precision, then it is not safe
3184 to strip NOPs. */
3185 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3186 return 0;
3188 STRIP_NOPS (arg0);
3189 STRIP_NOPS (arg1);
3191 /* In case both args are comparisons but with different comparison
3192 code, try to swap the comparison operands of one arg to produce
3193 a match and compare that variant. */
3194 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3195 && COMPARISON_CLASS_P (arg0)
3196 && COMPARISON_CLASS_P (arg1))
3198 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3200 if (TREE_CODE (arg0) == swap_code)
3201 return operand_equal_p (TREE_OPERAND (arg0, 0),
3202 TREE_OPERAND (arg1, 1), flags)
3203 && operand_equal_p (TREE_OPERAND (arg0, 1),
3204 TREE_OPERAND (arg1, 0), flags);
3207 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3208 /* This is needed for conversions and for COMPONENT_REF.
3209 Might as well play it safe and always test this. */
3210 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3211 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3212 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3213 return 0;
3215 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3216 We don't care about side effects in that case because the SAVE_EXPR
3217 takes care of that for us. In all other cases, two expressions are
3218 equal if they have no side effects. If we have two identical
3219 expressions with side effects that should be treated the same due
3220 to the only side effects being identical SAVE_EXPR's, that will
3221 be detected in the recursive calls below. */
3222 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3223 && (TREE_CODE (arg0) == SAVE_EXPR
3224 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3225 return 1;
3227 /* Next handle constant cases, those for which we can return 1 even
3228 if ONLY_CONST is set. */
3229 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3230 switch (TREE_CODE (arg0))
3232 case INTEGER_CST:
3233 return tree_int_cst_equal (arg0, arg1);
3235 case FIXED_CST:
3236 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3237 TREE_FIXED_CST (arg1));
3239 case REAL_CST:
3240 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3241 TREE_REAL_CST (arg1)))
3242 return 1;
3245 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3247 /* If we do not distinguish between signed and unsigned zero,
3248 consider them equal. */
3249 if (real_zerop (arg0) && real_zerop (arg1))
3250 return 1;
3252 return 0;
3254 case VECTOR_CST:
3256 tree v1, v2;
3258 v1 = TREE_VECTOR_CST_ELTS (arg0);
3259 v2 = TREE_VECTOR_CST_ELTS (arg1);
3260 while (v1 && v2)
3262 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3263 flags))
3264 return 0;
3265 v1 = TREE_CHAIN (v1);
3266 v2 = TREE_CHAIN (v2);
3269 return v1 == v2;
3272 case COMPLEX_CST:
3273 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3274 flags)
3275 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3276 flags));
3278 case STRING_CST:
3279 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3280 && ! memcmp (TREE_STRING_POINTER (arg0),
3281 TREE_STRING_POINTER (arg1),
3282 TREE_STRING_LENGTH (arg0)));
3284 case ADDR_EXPR:
3285 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3286                         0);
3287 default:
3288 break;
3291 if (flags & OEP_ONLY_CONST)
3292 return 0;
3294 /* Define macros to test an operand from arg0 and arg1 for equality and a
3295 variant that allows null and views null as being different from any
3296 non-null value.  In the latter case, if either is null, then both
3297 must be; otherwise, do the normal comparison. */
3298 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3299 TREE_OPERAND (arg1, N), flags)
3301 #define OP_SAME_WITH_NULL(N) \
3302 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3303 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3305 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3307 case tcc_unary:
3308 /* Two conversions are equal only if signedness and modes match. */
3309 switch (TREE_CODE (arg0))
3311 CASE_CONVERT:
3312 case FIX_TRUNC_EXPR:
3313 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3314 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3315 return 0;
3316 break;
3317 default:
3318 break;
3321 return OP_SAME (0);
3324 case tcc_comparison:
3325 case tcc_binary:
3326 if (OP_SAME (0) && OP_SAME (1))
3327 return 1;
3329 /* For commutative ops, allow the other order. */
3330 return (commutative_tree_code (TREE_CODE (arg0))
3331 && operand_equal_p (TREE_OPERAND (arg0, 0),
3332 TREE_OPERAND (arg1, 1), flags)
3333 && operand_equal_p (TREE_OPERAND (arg0, 1),
3334 TREE_OPERAND (arg1, 0), flags));
3336 case tcc_reference:
3337 /* If either of the pointer (or reference) expressions we are
3338 dereferencing contain a side effect, these cannot be equal. */
3339 if (TREE_SIDE_EFFECTS (arg0)
3340 || TREE_SIDE_EFFECTS (arg1))
3341 return 0;
3343 switch (TREE_CODE (arg0))
3345 case INDIRECT_REF:
3346 case ALIGN_INDIRECT_REF:
3347 case MISALIGNED_INDIRECT_REF:
3348 case REALPART_EXPR:
3349 case IMAGPART_EXPR:
3350 return OP_SAME (0);
3352 case ARRAY_REF:
3353 case ARRAY_RANGE_REF:
3354 /* Operands 2 and 3 may be null.
3355 Compare the array index by value first if it is constant, as the
3356 indices may have different types but the same value here.  */
3357 return (OP_SAME (0)
3358 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3359 TREE_OPERAND (arg1, 1))
3360 || OP_SAME (1))
3361 && OP_SAME_WITH_NULL (2)
3362 && OP_SAME_WITH_NULL (3));
3364 case COMPONENT_REF:
3365 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3366 may be NULL when we're called to compare MEM_EXPRs. */
3367 return OP_SAME_WITH_NULL (0)
3368 && OP_SAME (1)
3369 && OP_SAME_WITH_NULL (2);
3371 case BIT_FIELD_REF:
3372 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3374 default:
3375 return 0;
3378 case tcc_expression:
3379 switch (TREE_CODE (arg0))
3381 case ADDR_EXPR:
3382 case TRUTH_NOT_EXPR:
3383 return OP_SAME (0);
3385 case TRUTH_ANDIF_EXPR:
3386 case TRUTH_ORIF_EXPR:
3387 return OP_SAME (0) && OP_SAME (1);
3389 case TRUTH_AND_EXPR:
3390 case TRUTH_OR_EXPR:
3391 case TRUTH_XOR_EXPR:
3392 if (OP_SAME (0) && OP_SAME (1))
3393 return 1;
3395 /* Otherwise take into account this is a commutative operation. */
3396 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3397 TREE_OPERAND (arg1, 1), flags)
3398 && operand_equal_p (TREE_OPERAND (arg0, 1),
3399 TREE_OPERAND (arg1, 0), flags));
3401 case COND_EXPR:
3402 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3404 default:
3405 return 0;
3408 case tcc_vl_exp:
3409 switch (TREE_CODE (arg0))
3411 case CALL_EXPR:
3412 /* If the CALL_EXPRs call different functions, then they
3413 clearly cannot be equal.  */
3414 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3415 flags))
3416 return 0;
3419 unsigned int cef = call_expr_flags (arg0);
3420 if (flags & OEP_PURE_SAME)
3421 cef &= ECF_CONST | ECF_PURE;
3422 else
3423 cef &= ECF_CONST;
3424 if (!cef)
3425 return 0;
3428 /* Now see if all the arguments are the same. */
3430 const_call_expr_arg_iterator iter0, iter1;
3431 const_tree a0, a1;
3432 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3433 a1 = first_const_call_expr_arg (arg1, &iter1);
3434 a0 && a1;
3435 a0 = next_const_call_expr_arg (&iter0),
3436 a1 = next_const_call_expr_arg (&iter1))
3437 if (! operand_equal_p (a0, a1, flags))
3438 return 0;
3440 /* If we get here and both argument lists are exhausted
3441 then the CALL_EXPRs are equal. */
3442 return ! (a0 || a1);
3444 default:
3445 return 0;
3448 case tcc_declaration:
3449 /* Consider __builtin_sqrt equal to sqrt. */
3450 return (TREE_CODE (arg0) == FUNCTION_DECL
3451 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3452 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3453 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3455 default:
3456 return 0;
3459 #undef OP_SAME
3460 #undef OP_SAME_WITH_NULL
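/* Thus a[i].f is operand_equal_p to another a[i].f as long as neither
   copy has side effects; an embedded i++ on either side yields 0,
   since the two expressions need not denote the same object.  */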
3463 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3464 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3466 When in doubt, return 0. */
3468 static int
3469 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3471 int unsignedp1, unsignedpo;
3472 tree primarg0, primarg1, primother;
3473 unsigned int correct_width;
3475 if (operand_equal_p (arg0, arg1, 0))
3476 return 1;
3478 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3479 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3480 return 0;
3482 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3483 and see if the inner values are the same. This removes any
3484 signedness comparison, which doesn't matter here. */
3485 primarg0 = arg0, primarg1 = arg1;
3486 STRIP_NOPS (primarg0);
3487 STRIP_NOPS (primarg1);
3488 if (operand_equal_p (primarg0, primarg1, 0))
3489 return 1;
3491 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3492 actual comparison operand, ARG0.
3494 First throw away any conversions to wider types
3495 already present in the operands. */
3497 primarg1 = get_narrower (arg1, &unsignedp1);
3498 primother = get_narrower (other, &unsignedpo);
3500 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3501 if (unsignedp1 == unsignedpo
3502 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3503 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3505 tree type = TREE_TYPE (arg0);
3507 /* Make sure shorter operand is extended the right way
3508 to match the longer operand. */
3509 primarg1 = fold_convert (signed_or_unsigned_type_for
3510 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3512 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3513 return 1;
3516 return 0;
3519 /* See if ARG is an expression that is either a comparison or is performing
3520 arithmetic on comparisons. The comparisons must only be comparing
3521 two different values, which will be stored in *CVAL1 and *CVAL2; if
3522 they are nonzero it means that some operands have already been found.
3523 No variables may be used anywhere else in the expression except in the
3524 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3525 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3527 If this is true, return 1. Otherwise, return zero. */
3529 static int
3530 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3532 enum tree_code code = TREE_CODE (arg);
3533 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3535 /* We can handle some of the tcc_expression cases here. */
3536 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3537 tclass = tcc_unary;
3538 else if (tclass == tcc_expression
3539 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3540 || code == COMPOUND_EXPR))
3541 tclass = tcc_binary;
3543 else if (tclass == tcc_expression && code == SAVE_EXPR
3544 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3546 /* If we've already found a CVAL1 or CVAL2, this expression is
3547 too complex to handle.  */
3548 if (*cval1 || *cval2)
3549 return 0;
3551 tclass = tcc_unary;
3552 *save_p = 1;
3555 switch (tclass)
3557 case tcc_unary:
3558 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3560 case tcc_binary:
3561 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3562 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3563 cval1, cval2, save_p));
3565 case tcc_constant:
3566 return 1;
3568 case tcc_expression:
3569 if (code == COND_EXPR)
3570 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3571 cval1, cval2, save_p)
3572 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3573 cval1, cval2, save_p)
3574 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3575 cval1, cval2, save_p));
3576 return 0;
3578 case tcc_comparison:
3579 /* First see if we can handle the first operand, then the second. For
3580 the second operand, we know *CVAL1 can't be zero. It must be that
3581 one side of the comparison is each of the values; test for the
3582 case where this isn't true by failing if the two operands
3583 are the same. */
3585 if (operand_equal_p (TREE_OPERAND (arg, 0),
3586 TREE_OPERAND (arg, 1), 0))
3587 return 0;
3589 if (*cval1 == 0)
3590 *cval1 = TREE_OPERAND (arg, 0);
3591 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3593 else if (*cval2 == 0)
3594 *cval2 = TREE_OPERAND (arg, 0);
3595 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3597 else
3598 return 0;
3600 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3602 else if (*cval2 == 0)
3603 *cval2 = TREE_OPERAND (arg, 1);
3604 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3606 else
3607 return 0;
3609 return 1;
3611 default:
3612 return 0;
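/* E.g. for ARG = (x < y && x != y) this returns 1 with *CVAL1 = x and
   *CVAL2 = y, since every comparison involves only those two values;
   a third variable in any comparison makes it return 0.  */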
3616 /* ARG is a tree that is known to contain just arithmetic operations and
3617 comparisons. Evaluate the operations in the tree substituting NEW0 for
3618 any occurrence of OLD0 as an operand of a comparison and likewise for
3619 NEW1 and OLD1. */
3621 static tree
3622 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3623 tree old1, tree new1)
3625 tree type = TREE_TYPE (arg);
3626 enum tree_code code = TREE_CODE (arg);
3627 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3629 /* We can handle some of the tcc_expression cases here. */
3630 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3631 tclass = tcc_unary;
3632 else if (tclass == tcc_expression
3633 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3634 tclass = tcc_binary;
3636 switch (tclass)
3638 case tcc_unary:
3639 return fold_build1_loc (loc, code, type,
3640 eval_subst (loc, TREE_OPERAND (arg, 0),
3641 old0, new0, old1, new1));
3643 case tcc_binary:
3644 return fold_build2_loc (loc, code, type,
3645 eval_subst (loc, TREE_OPERAND (arg, 0),
3646 old0, new0, old1, new1),
3647 eval_subst (loc, TREE_OPERAND (arg, 1),
3648 old0, new0, old1, new1));
3650 case tcc_expression:
3651 switch (code)
3653 case SAVE_EXPR:
3654 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3655 old1, new1);
3657 case COMPOUND_EXPR:
3658 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3659 old1, new1);
3661 case COND_EXPR:
3662 return fold_build3_loc (loc, code, type,
3663 eval_subst (loc, TREE_OPERAND (arg, 0),
3664 old0, new0, old1, new1),
3665 eval_subst (loc, TREE_OPERAND (arg, 1),
3666 old0, new0, old1, new1),
3667 eval_subst (loc, TREE_OPERAND (arg, 2),
3668 old0, new0, old1, new1));
3669 default:
3670 break;
3672 /* Fall through - ??? */
3674 case tcc_comparison:
3676 tree arg0 = TREE_OPERAND (arg, 0);
3677 tree arg1 = TREE_OPERAND (arg, 1);
3679 /* We need to check both for exact equality and tree equality. The
3680 former will be true if the operand has a side-effect. In that
3681 case, we know the operand occurred exactly once. */
3683 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3684 arg0 = new0;
3685 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3686 arg0 = new1;
3688 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3689 arg1 = new0;
3690 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3691 arg1 = new1;
3693 return fold_build2_loc (loc, code, type, arg0, arg1);
3696 default:
3697 return arg;
3701 /* Return a tree for the case when the result of an expression is RESULT
3702 converted to TYPE and OMITTED was previously an operand of the expression
3703 but is now not needed (e.g., we folded OMITTED * 0).
3705 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3706 the conversion of RESULT to TYPE. */
3708 tree
3709 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3711 tree t = fold_convert_loc (loc, type, result);
3713 /* If the resulting operand is an empty statement, just return the omitted
3714 statement cast to void.  */
3715 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3717 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3718 goto omit_one_operand_exit;
3721 if (TREE_SIDE_EFFECTS (omitted))
3723 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3724 goto omit_one_operand_exit;
3727 return non_lvalue_loc (loc, t);
3729 omit_one_operand_exit:
3730 protected_set_expr_location (t, loc);
3731 return t;
3734 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3736 static tree
3737 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3738 tree omitted)
3740 tree t = fold_convert_loc (loc, type, result);
3742 /* If the resulting operand is an empty statement, just return the omitted
3743 statement cast to void.  */
3744 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3746 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3747 goto pedantic_omit_one_operand_exit;
3750 if (TREE_SIDE_EFFECTS (omitted))
3752 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3753 goto pedantic_omit_one_operand_exit;
3756 return pedantic_non_lvalue_loc (loc, t);
3758 pedantic_omit_one_operand_exit:
3759 protected_set_expr_location (t, loc);
3760 return t;
3763 /* Return a tree for the case when the result of an expression is RESULT
3764 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3765 of the expression but are now not needed.
3767 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3768 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3769 evaluated before OMITTED2. Otherwise, if neither has side effects,
3770 just do the conversion of RESULT to TYPE. */
3772 tree
3773 omit_two_operands_loc (location_t loc, tree type, tree result,
3774 tree omitted1, tree omitted2)
3776 tree t = fold_convert_loc (loc, type, result);
3778 if (TREE_SIDE_EFFECTS (omitted2))
3780 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3781 SET_EXPR_LOCATION (t, loc);
3783 if (TREE_SIDE_EFFECTS (omitted1))
3785 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3786 SET_EXPR_LOCATION (t, loc);
3789 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3793 /* Return a simplified tree node for the truth-negation of ARG. This
3794 never alters ARG itself. We assume that ARG is an operation that
3795 returns a truth value (0 or 1).
3797 FIXME: one would think we would fold the result, but it causes
3798 problems with the dominator optimizer. */
3800 tree
3801 fold_truth_not_expr (location_t loc, tree arg)
3803 tree t, type = TREE_TYPE (arg);
3804 enum tree_code code = TREE_CODE (arg);
3805 location_t loc1, loc2;
3807 /* If this is a comparison, we can simply invert it, except for
3808 floating-point non-equality comparisons, in which case we just
3809 enclose a TRUTH_NOT_EXPR around what we have. */
3811 if (TREE_CODE_CLASS (code) == tcc_comparison)
3813 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3814 if (FLOAT_TYPE_P (op_type)
3815 && flag_trapping_math
3816 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3817 && code != NE_EXPR && code != EQ_EXPR)
3818 return NULL_TREE;
3820 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3821 if (code == ERROR_MARK)
3822 return NULL_TREE;
3824 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3825 SET_EXPR_LOCATION (t, loc);
3826 return t;
3829 switch (code)
3831 case INTEGER_CST:
3832 return constant_boolean_node (integer_zerop (arg), type);
3834 case TRUTH_AND_EXPR:
3835 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3836 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3837 if (loc1 == UNKNOWN_LOCATION)
3838 loc1 = loc;
3839 if (loc2 == UNKNOWN_LOCATION)
3840 loc2 = loc;
3841 t = build2 (TRUTH_OR_EXPR, type,
3842 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3843 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3844 break;
3846 case TRUTH_OR_EXPR:
3847 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3848 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3849 if (loc1 == UNKNOWN_LOCATION)
3850 loc1 = loc;
3851 if (loc2 == UNKNOWN_LOCATION)
3852 loc2 = loc;
3853 t = build2 (TRUTH_AND_EXPR, type,
3854 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3855 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3856 break;
3858 case TRUTH_XOR_EXPR:
3859 /* Here we can invert either operand. We invert the first operand
3860 unless the second operand is a TRUTH_NOT_EXPR, in which case our
3861 result is the XOR of the first operand with the inside of the
3862 negation of the second operand. */
3864 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3865 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3866 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3867 else
3868 t = build2 (TRUTH_XOR_EXPR, type,
3869 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3870 TREE_OPERAND (arg, 1));
3871 break;
3873 case TRUTH_ANDIF_EXPR:
3874 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3875 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3876 if (loc1 == UNKNOWN_LOCATION)
3877 loc1 = loc;
3878 if (loc2 == UNKNOWN_LOCATION)
3879 loc2 = loc;
3880 t = build2 (TRUTH_ORIF_EXPR, type,
3881 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3882 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3883 break;
3885 case TRUTH_ORIF_EXPR:
3886 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3887 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3888 if (loc1 == UNKNOWN_LOCATION)
3889 loc1 = loc;
3890 if (loc2 == UNKNOWN_LOCATION)
3891 loc2 = loc;
3892 t = build2 (TRUTH_ANDIF_EXPR, type,
3893 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3894 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3895 break;
3897 case TRUTH_NOT_EXPR:
3898 return TREE_OPERAND (arg, 0);
3900 case COND_EXPR:
3902 tree arg1 = TREE_OPERAND (arg, 1);
3903 tree arg2 = TREE_OPERAND (arg, 2);
3905 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3906 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3907 if (loc1 == UNKNOWN_LOCATION)
3908 loc1 = loc;
3909 if (loc2 == UNKNOWN_LOCATION)
3910 loc2 = loc;
3912 /* A COND_EXPR may have a throw as one operand, which
3913 then has void type. Just leave void operands
3914 as they are. */
3915 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3916 VOID_TYPE_P (TREE_TYPE (arg1))
3917 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3918 VOID_TYPE_P (TREE_TYPE (arg2))
3919 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3920 break;
3923 case COMPOUND_EXPR:
3924 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3925 if (loc1 == UNKNOWN_LOCATION)
3926 loc1 = loc;
3927 t = build2 (COMPOUND_EXPR, type,
3928 TREE_OPERAND (arg, 0),
3929 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3930 break;
3932 case NON_LVALUE_EXPR:
3933 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3934 if (loc1 == UNKNOWN_LOCATION)
3935 loc1 = loc;
3936 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3938 CASE_CONVERT:
3939 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3941 t = build1 (TRUTH_NOT_EXPR, type, arg);
3942 break;
3945 /* ... fall through ... */
3947 case FLOAT_EXPR:
3948 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3949 if (loc1 == UNKNOWN_LOCATION)
3950 loc1 = loc;
3951 t = build1 (TREE_CODE (arg), type,
3952 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3953 break;
3955 case BIT_AND_EXPR:
3956 if (!integer_onep (TREE_OPERAND (arg, 1)))
3957 return NULL_TREE;
3958 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3959 break;
3961 case SAVE_EXPR:
3962 t = build1 (TRUTH_NOT_EXPR, type, arg);
3963 break;
3965 case CLEANUP_POINT_EXPR:
3966 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3967 if (loc1 == UNKNOWN_LOCATION)
3968 loc1 = loc;
3969 t = build1 (CLEANUP_POINT_EXPR, type,
3970 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3971 break;
3973 default:
3974 t = NULL_TREE;
3975 break;
3978 if (t)
3979 SET_EXPR_LOCATION (t, loc);
3981 return t;
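/* Illustrative sketch (not part of the original source): the AND/OR
   cases above are De Morgan's laws at the tree level, and the
   comparison case simply flips the comparison code.  In source terms,
   with hypothetical example functions:  */
static int
example_invert_and (int a, int b)
{
  /* fold_truth_not_expr rewrites "!(a && b)" as:  */
  return !a || !b;
}

static int
example_invert_cmp (int a, int b)
{
  /* ...and "!(a < b)" as the inverted comparison (for integers;
     floating point is subject to the trapping-math checks above):  */
  return a >= b;
}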
3984 /* Return a simplified tree node for the truth-negation of ARG. This
3985 never alters ARG itself. We assume that ARG is an operation that
3986 returns a truth value (0 or 1).
3988 FIXME: one would think we would fold the result, but it causes
3989 problems with the dominator optimizer. */
3991 tree
3992 invert_truthvalue_loc (location_t loc, tree arg)
3994 tree tem;
3996 if (TREE_CODE (arg) == ERROR_MARK)
3997 return arg;
3999 tem = fold_truth_not_expr (loc, arg);
4000 if (!tem)
4002 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
4003 SET_EXPR_LOCATION (tem, loc);
4006 return tem;
4009 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
4010 operands are another bit-wise operation with a common input. If so,
4011 distribute the bit operations to save an operation and possibly two if
4012 constants are involved. For example, convert
4013 (A | B) & (A | C) into A | (B & C)
4014 Further simplification will occur if B and C are constants.
4016 If this optimization cannot be done, 0 will be returned. */
4018 static tree
4019 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
4020 tree arg0, tree arg1)
4022 tree common;
4023 tree left, right;
4025 if (TREE_CODE (arg0) != TREE_CODE (arg1)
4026 || TREE_CODE (arg0) == code
4027 || (TREE_CODE (arg0) != BIT_AND_EXPR
4028 && TREE_CODE (arg0) != BIT_IOR_EXPR))
4029 return 0;
4031 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
4033 common = TREE_OPERAND (arg0, 0);
4034 left = TREE_OPERAND (arg0, 1);
4035 right = TREE_OPERAND (arg1, 1);
4037 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
4039 common = TREE_OPERAND (arg0, 0);
4040 left = TREE_OPERAND (arg0, 1);
4041 right = TREE_OPERAND (arg1, 0);
4043 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
4045 common = TREE_OPERAND (arg0, 1);
4046 left = TREE_OPERAND (arg0, 0);
4047 right = TREE_OPERAND (arg1, 1);
4049 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
4051 common = TREE_OPERAND (arg0, 1);
4052 left = TREE_OPERAND (arg0, 0);
4053 right = TREE_OPERAND (arg1, 0);
4055 else
4056 return 0;
4058 common = fold_convert_loc (loc, type, common);
4059 left = fold_convert_loc (loc, type, left);
4060 right = fold_convert_loc (loc, type, right);
4061 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
4062 fold_build2_loc (loc, code, type, left, right));
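/* Illustrative sketch (not part of the original source): the
   distribution above saves one bit operation.  In source terms:  */
static unsigned int
example_distribute_before (unsigned int a, unsigned int b, unsigned int c)
{
  return (a | b) & (a | c);	/* two ORs and one AND */
}

static unsigned int
example_distribute_after (unsigned int a, unsigned int b, unsigned int c)
{
  return a | (b & c);		/* one OR and one AND */
}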
4065 /* Knowing that ARG0 and ARG1 are each either a RDIV_EXPR or a MULT_EXPR,
4066 simplify a binary operation with code CODE. This optimization is unsafe for IEEE arithmetic, so it is only used when unsafe math optimizations are permitted. */
4067 static tree
4068 distribute_real_division (location_t loc, enum tree_code code, tree type,
4069 tree arg0, tree arg1)
4071 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
4072 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
4074 /* (A / C) +- (B / C) -> (A +- B) / C. */
4075 if (mul0 == mul1
4076 && operand_equal_p (TREE_OPERAND (arg0, 1),
4077 TREE_OPERAND (arg1, 1), 0))
4078 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
4079 fold_build2_loc (loc, code, type,
4080 TREE_OPERAND (arg0, 0),
4081 TREE_OPERAND (arg1, 0)),
4082 TREE_OPERAND (arg0, 1));
4084 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
4085 if (operand_equal_p (TREE_OPERAND (arg0, 0),
4086 TREE_OPERAND (arg1, 0), 0)
4087 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
4088 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
4090 REAL_VALUE_TYPE r0, r1;
4091 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
4092 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
4093 if (!mul0)
4094 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
4095 if (!mul1)
4096 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
4097 real_arithmetic (&r0, code, &r0, &r1);
4098 return fold_build2_loc (loc, MULT_EXPR, type,
4099 TREE_OPERAND (arg0, 0),
4100 build_real (type, r0));
4103 return NULL_TREE;
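/* Illustrative sketch (not part of the original source): assuming
   unsafe math optimizations are in effect, the second transformation
   above turns two divisions by constants into one multiplication.  */
static double
example_distribute_rdiv (double a)
{
  /* "a / 2.0 + a / 4.0" becomes "a * (1.0/2.0 + 1.0/4.0)", i.e.:  */
  return a * 0.75;
}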
4106 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4107 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
4109 static tree
4110 make_bit_field_ref (location_t loc, tree inner, tree type,
4111 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
4113 tree result, bftype;
4115 if (bitpos == 0)
4117 tree size = TYPE_SIZE (TREE_TYPE (inner));
4118 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4119 || POINTER_TYPE_P (TREE_TYPE (inner)))
4120 && host_integerp (size, 0)
4121 && tree_low_cst (size, 0) == bitsize)
4122 return fold_convert_loc (loc, type, inner);
4125 bftype = type;
4126 if (TYPE_PRECISION (bftype) != bitsize
4127 || TYPE_UNSIGNED (bftype) == !unsignedp)
4128 bftype = build_nonstandard_integer_type (bitsize, 0);
4130 result = build3 (BIT_FIELD_REF, bftype, inner,
4131 size_int (bitsize), bitsize_int (bitpos));
4132 SET_EXPR_LOCATION (result, loc);
4134 if (bftype != type)
4135 result = fold_convert_loc (loc, type, result);
4137 return result;
4140 /* Optimize a bit-field compare.
4142 There are two cases: the first is a compare against a constant and the
4143 second is a comparison of two items where the fields are at the same
4144 bit position relative to the start of a chunk (byte, halfword, word)
4145 large enough to contain it. In these cases we can avoid the shift
4146 implicit in bitfield extractions.
4148 For constants, we emit a compare of the shifted constant with the
4149 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4150 compared. For two fields at the same position, we do the ANDs with the
4151 similar mask and compare the result of the ANDs.
4153 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4154 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4155 are the left and right operands of the comparison, respectively.
4157 If the optimization described above can be done, we return the resulting
4158 tree. Otherwise we return zero. */
4160 static tree
4161 optimize_bit_field_compare (location_t loc, enum tree_code code,
4162 tree compare_type, tree lhs, tree rhs)
4164 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4165 tree type = TREE_TYPE (lhs);
4166 tree signed_type, unsigned_type;
4167 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4168 enum machine_mode lmode, rmode, nmode;
4169 int lunsignedp, runsignedp;
4170 int lvolatilep = 0, rvolatilep = 0;
4171 tree linner, rinner = NULL_TREE;
4172 tree mask;
4173 tree offset;
4175 /* Get all the information about the extractions being done. If the bit size
4176 is the same as the size of the underlying object, we aren't doing an
4177 extraction at all and so can do nothing. We also don't want to
4178 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4179 then will no longer be able to replace it. */
4180 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4181 &lunsignedp, &lvolatilep, false);
4182 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4183 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
4184 return 0;
4186 if (!const_p)
4188 /* If this is not a constant, we can only do something if bit positions,
4189 sizes, and signedness are the same. */
4190 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4191 &runsignedp, &rvolatilep, false);
4193 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4194 || lunsignedp != runsignedp || offset != 0
4195 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4196 return 0;
4199 /* See if we can find a mode to refer to this field. We should be able to,
4200 but fail if we can't. */
4201 nmode = get_best_mode (lbitsize, lbitpos,
4202 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4203 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4204 TYPE_ALIGN (TREE_TYPE (rinner))),
4205 word_mode, lvolatilep || rvolatilep);
4206 if (nmode == VOIDmode)
4207 return 0;
4209 /* Set signed and unsigned types of the precision of this mode for the
4210 shifts below. */
4211 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4212 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4214 /* Compute the bit position and size for the new reference and our offset
4215 within it. If the new reference is the same size as the original, we
4216 won't optimize anything, so return zero. */
4217 nbitsize = GET_MODE_BITSIZE (nmode);
4218 nbitpos = lbitpos & ~ (nbitsize - 1);
4219 lbitpos -= nbitpos;
4220 if (nbitsize == lbitsize)
4221 return 0;
4223 if (BYTES_BIG_ENDIAN)
4224 lbitpos = nbitsize - lbitsize - lbitpos;
4226 /* Make the mask to be used against the extracted field. */
4227 mask = build_int_cst_type (unsigned_type, -1);
4228 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4229 mask = const_binop (RSHIFT_EXPR, mask,
4230 size_int (nbitsize - lbitsize - lbitpos), 0);
4232 if (! const_p)
4233 /* If not comparing with a constant, just rework the comparison
4234 and return. */
4235 return fold_build2_loc (loc, code, compare_type,
4236 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4237 make_bit_field_ref (loc, linner,
4238 unsigned_type,
4239 nbitsize, nbitpos,
4240 1),
4241 mask),
4242 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4243 make_bit_field_ref (loc, rinner,
4244 unsigned_type,
4245 nbitsize, nbitpos,
4246 1),
4247 mask));
4249 /* Otherwise, we are handling the constant case. See if the constant is too
4250 big for the field. Warn and return a tree for 0 (false) if so. We do
4251 this not only for its own sake, but to avoid having to test for this
4252 error case below. If we didn't, we might generate wrong code.
4254 For unsigned fields, the constant shifted right by the field length should
4255 be all zero. For signed fields, the high-order bits should agree with
4256 the sign bit. */
4258 if (lunsignedp)
4260 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4261 fold_convert_loc (loc,
4262 unsigned_type, rhs),
4263 size_int (lbitsize), 0)))
4265 warning (0, "comparison is always %d due to width of bit-field",
4266 code == NE_EXPR);
4267 return constant_boolean_node (code == NE_EXPR, compare_type);
4270 else
4272 tree tem = const_binop (RSHIFT_EXPR,
4273 fold_convert_loc (loc, signed_type, rhs),
4274 size_int (lbitsize - 1), 0);
4275 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4277 warning (0, "comparison is always %d due to width of bit-field",
4278 code == NE_EXPR);
4279 return constant_boolean_node (code == NE_EXPR, compare_type);
4283 /* Single-bit compares should always be against zero. */
4284 if (lbitsize == 1 && ! integer_zerop (rhs))
4286 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4287 rhs = build_int_cst (type, 0);
4290 /* Make a new bitfield reference, shift the constant over the
4291 appropriate number of bits and mask it with the computed mask
4292 (in case this was a signed field). If we changed it, make a new one. */
4293 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
4294 if (lvolatilep)
4296 TREE_SIDE_EFFECTS (lhs) = 1;
4297 TREE_THIS_VOLATILE (lhs) = 1;
4300 rhs = const_binop (BIT_AND_EXPR,
4301 const_binop (LSHIFT_EXPR,
4302 fold_convert_loc (loc, unsigned_type, rhs),
4303 size_int (lbitpos), 0),
4304 mask, 0);
4306 lhs = build2 (code, compare_type,
4307 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4308 rhs);
4309 SET_EXPR_LOCATION (lhs, loc);
4310 return lhs;
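/* Illustrative sketch (not part of the original source): a bit-field
   compare of the kind handled above.  The struct and its layout are
   hypothetical and target-dependent.  */
struct example_bits
{
  unsigned int lo : 3;
  unsigned int f : 4;
};

static int
example_bit_field_compare (struct example_bits s)
{
  /* Instead of extracting and shifting s.f, the fold loads a whole
     mode-sized chunk, masks bits 3..6 and compares against the
     constant shifted into place: roughly "(chunk & 0x78) == (5 << 3)".  */
  return s.f == 5;
}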
4313 /* Subroutine for fold_truthop: decode a field reference.
4315 If EXP is a comparison reference, we return the innermost reference.
4317 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4318 set to the starting bit number.
4320 If the innermost field can be completely contained in a mode-sized
4321 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4323 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4324 otherwise it is not changed.
4326 *PUNSIGNEDP is set to the signedness of the field.
4328 *PMASK is set to the mask used. This is either contained in a
4329 BIT_AND_EXPR or derived from the width of the field.
4331 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4333 Return 0 if this is not a component reference or is one that we can't
4334 do anything with. */
4336 static tree
4337 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4338 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4339 int *punsignedp, int *pvolatilep,
4340 tree *pmask, tree *pand_mask)
4342 tree outer_type = 0;
4343 tree and_mask = 0;
4344 tree mask, inner, offset;
4345 tree unsigned_type;
4346 unsigned int precision;
4348 /* All the optimizations using this function assume integer fields.
4349 There are problems with FP fields since the type_for_size call
4350 below can fail for, e.g., XFmode. */
4351 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4352 return 0;
4354 /* We are interested in the bare arrangement of bits, so strip everything
4355 that doesn't affect the machine mode. However, record the type of the
4356 outermost expression if it may matter below. */
4357 if (CONVERT_EXPR_P (exp)
4358 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4359 outer_type = TREE_TYPE (exp);
4360 STRIP_NOPS (exp);
4362 if (TREE_CODE (exp) == BIT_AND_EXPR)
4364 and_mask = TREE_OPERAND (exp, 1);
4365 exp = TREE_OPERAND (exp, 0);
4366 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4367 if (TREE_CODE (and_mask) != INTEGER_CST)
4368 return 0;
4371 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4372 punsignedp, pvolatilep, false);
4373 if ((inner == exp && and_mask == 0)
4374 || *pbitsize < 0 || offset != 0
4375 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4376 return 0;
4378 /* If the number of bits in the reference is the same as the bitsize of
4379 the outer type, then the outer type gives the signedness. Otherwise
4380 (in case of a small bitfield) the signedness is unchanged. */
4381 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4382 *punsignedp = TYPE_UNSIGNED (outer_type);
4384 /* Compute the mask to access the bitfield. */
4385 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4386 precision = TYPE_PRECISION (unsigned_type);
4388 mask = build_int_cst_type (unsigned_type, -1);
4390 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4391 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4393 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4394 if (and_mask != 0)
4395 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4396 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4398 *pmask = mask;
4399 *pand_mask = and_mask;
4400 return inner;
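/* Illustrative sketch (not part of the original source): the mask
   computed above is *PBITSIZE low-order ones.  Shifting an all-ones
   value up and back down, as the LSHIFT/RSHIFT pair does, also works
   when the field fills the whole type, where the naive
   "(1 << bitsize) - 1" would shift by the full width.  Assumes a
   32-bit unsigned int and 1 <= bitsize <= 32.  */
static unsigned int
example_field_mask (unsigned int bitsize)
{
  unsigned int mask = ~0U;		/* all ones */
  mask <<= 32 - bitsize;		/* keep the top BITSIZE bits */
  return mask >> (32 - bitsize);	/* move them to the bottom */
}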
4403 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4404 bit positions. */
4406 static int
4407 all_ones_mask_p (const_tree mask, int size)
4409 tree type = TREE_TYPE (mask);
4410 unsigned int precision = TYPE_PRECISION (type);
4411 tree tmask;
4413 tmask = build_int_cst_type (signed_type_for (type), -1);
4415 return
4416 tree_int_cst_equal (mask,
4417 const_binop (RSHIFT_EXPR,
4418 const_binop (LSHIFT_EXPR, tmask,
4419 size_int (precision - size),
4420 0),
4421 size_int (precision - size), 0));
4424 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4425 represents the sign bit of EXP's type. If EXP represents a sign
4426 or zero extension, also test VAL against the unextended type.
4427 The return value is the (sub)expression whose sign bit is VAL,
4428 or NULL_TREE otherwise. */
4430 static tree
4431 sign_bit_p (tree exp, const_tree val)
4433 unsigned HOST_WIDE_INT mask_lo, lo;
4434 HOST_WIDE_INT mask_hi, hi;
4435 int width;
4436 tree t;
4438 /* Tree EXP must have an integral type. */
4439 t = TREE_TYPE (exp);
4440 if (! INTEGRAL_TYPE_P (t))
4441 return NULL_TREE;
4443 /* Tree VAL must be an integer constant. */
4444 if (TREE_CODE (val) != INTEGER_CST
4445 || TREE_OVERFLOW (val))
4446 return NULL_TREE;
4448 width = TYPE_PRECISION (t);
4449 if (width > HOST_BITS_PER_WIDE_INT)
4451 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4452 lo = 0;
4454 mask_hi = ((unsigned HOST_WIDE_INT) -1
4455 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4456 mask_lo = -1;
4458 else
4460 hi = 0;
4461 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4463 mask_hi = 0;
4464 mask_lo = ((unsigned HOST_WIDE_INT) -1
4465 >> (HOST_BITS_PER_WIDE_INT - width));
4468 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4469 treat VAL as if it were unsigned. */
4470 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4471 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4472 return exp;
4474 /* Handle extension from a narrower type. */
4475 if (TREE_CODE (exp) == NOP_EXPR
4476 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4477 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4479 return NULL_TREE;
4482 /* Subroutine for fold_truthop: determine if an operand is simple enough
4483 to be evaluated unconditionally. */
4485 static int
4486 simple_operand_p (const_tree exp)
4488 /* Strip any conversions that don't change the machine mode. */
4489 STRIP_NOPS (exp);
4491 return (CONSTANT_CLASS_P (exp)
4492 || TREE_CODE (exp) == SSA_NAME
4493 || (DECL_P (exp)
4494 && ! TREE_ADDRESSABLE (exp)
4495 && ! TREE_THIS_VOLATILE (exp)
4496 && ! DECL_NONLOCAL (exp)
4497 /* Don't regard global variables as simple. They may be
4498 allocated in ways unknown to the compiler (shared memory,
4499 #pragma weak, etc). */
4500 && ! TREE_PUBLIC (exp)
4501 && ! DECL_EXTERNAL (exp)
4502 /* Loading a static variable is unduly expensive, but global
4503 registers aren't expensive. */
4504 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4507 /* The following functions are subroutines to fold_range_test and allow it to
4508 try to change a logical combination of comparisons into a range test.
4510 For example, both
4511 X == 2 || X == 3 || X == 4 || X == 5
4512 and
4513 X >= 2 && X <= 5
4514 are converted to
4515 (unsigned) (X - 2) <= 3
4517 We describe each set of comparisons as being either inside or outside
4518 a range, using a variable named like IN_P, and then describe the
4519 range with a lower and upper bound. If one of the bounds is omitted,
4520 it represents either the highest or lowest value of the type.
4522 In the comments below, we represent a range by two numbers in brackets
4523 preceded by a "+" to designate being inside that range, or a "-" to
4524 designate being outside that range, so the condition can be inverted by
4525 flipping the prefix. An omitted bound is represented by a "-". For
4526 example, "- [-, 10]" means being outside the range starting at the lowest
4527 possible value and ending at 10, in other words, being greater than 10.
4528 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4529 always false.
4531 We set up things so that the missing bounds are handled in a consistent
4532 manner so neither a missing bound nor "true" and "false" need to be
4533 handled using a special case. */
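/* Illustrative sketch (not part of the original source): the
   range-test payoff described above, in source terms.  Both functions
   below agree for every int value of X; the folded form needs one
   subtraction and one unsigned comparison instead of a chain of
   tests.  */
static int
example_range_before (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
example_range_after (int x)
{
  /* The subtraction is done in the unsigned type, so values below 2
     wrap to something large and fail the comparison.  */
  return (unsigned int) x - 2U <= 3U;
}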
4535 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4536 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4537 and UPPER1_P are nonzero if the respective argument is an upper bound
4538 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4539 must be specified for a comparison. ARG1 will be converted to ARG0's
4540 type if both are specified. */
4542 static tree
4543 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4544 tree arg1, int upper1_p)
4546 tree tem;
4547 int result;
4548 int sgn0, sgn1;
4550 /* If neither arg represents infinity, do the normal operation.
4551 Else, if not a comparison, return infinity. Else handle the special
4552 comparison rules. Note that most of the cases below won't occur, but
4553 are handled for consistency. */
4555 if (arg0 != 0 && arg1 != 0)
4557 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4558 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4559 STRIP_NOPS (tem);
4560 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4563 if (TREE_CODE_CLASS (code) != tcc_comparison)
4564 return 0;
4566 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4567 for neither. In real maths, we cannot assume open ended ranges are
4568 the same. But, this is computer arithmetic, where numbers are finite.
4569 We can therefore make the transformation of any unbounded range with
4570 the value Z, Z being greater than any representable number. This permits
4571 us to treat unbounded ranges as equal. */
4572 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4573 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4574 switch (code)
4576 case EQ_EXPR:
4577 result = sgn0 == sgn1;
4578 break;
4579 case NE_EXPR:
4580 result = sgn0 != sgn1;
4581 break;
4582 case LT_EXPR:
4583 result = sgn0 < sgn1;
4584 break;
4585 case LE_EXPR:
4586 result = sgn0 <= sgn1;
4587 break;
4588 case GT_EXPR:
4589 result = sgn0 > sgn1;
4590 break;
4591 case GE_EXPR:
4592 result = sgn0 >= sgn1;
4593 break;
4594 default:
4595 gcc_unreachable ();
4598 return constant_boolean_node (result, type);
4601 /* Given EXP, a logical expression, set the range it is testing into
4602 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4603 actually being tested. *PLOW and *PHIGH will be made of the same
4604 type as the returned expression. If EXP is not a comparison, we
4605 will most likely not be returning a useful value and range. Set
4606 *STRICT_OVERFLOW_P to true if the return value is only valid
4607 because signed overflow is undefined; otherwise, do not change
4608 *STRICT_OVERFLOW_P. */
4610 tree
4611 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4612 bool *strict_overflow_p)
4614 enum tree_code code;
4615 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4616 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4617 int in_p, n_in_p;
4618 tree low, high, n_low, n_high;
4619 location_t loc = EXPR_LOCATION (exp);
4621 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4622 and see if we can refine the range. Some of the cases below may not
4623 happen, but it doesn't seem worth worrying about this. We "continue"
4624 the outer loop when we've changed something; otherwise we "break"
4625 the switch, which will "break" the while. */
4627 in_p = 0;
4628 low = high = build_int_cst (TREE_TYPE (exp), 0);
4630 while (1)
4632 code = TREE_CODE (exp);
4633 exp_type = TREE_TYPE (exp);
4635 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4637 if (TREE_OPERAND_LENGTH (exp) > 0)
4638 arg0 = TREE_OPERAND (exp, 0);
4639 if (TREE_CODE_CLASS (code) == tcc_comparison
4640 || TREE_CODE_CLASS (code) == tcc_unary
4641 || TREE_CODE_CLASS (code) == tcc_binary)
4642 arg0_type = TREE_TYPE (arg0);
4643 if (TREE_CODE_CLASS (code) == tcc_binary
4644 || TREE_CODE_CLASS (code) == tcc_comparison
4645 || (TREE_CODE_CLASS (code) == tcc_expression
4646 && TREE_OPERAND_LENGTH (exp) > 1))
4647 arg1 = TREE_OPERAND (exp, 1);
4650 switch (code)
4652 case TRUTH_NOT_EXPR:
4653 in_p = ! in_p, exp = arg0;
4654 continue;
4656 case EQ_EXPR: case NE_EXPR:
4657 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4658 /* We can only do something if the range is testing for zero
4659 and if the second operand is an integer constant. Note that
4660 saying something is "in" the range we make is done by
4661 complementing IN_P, since it will be set in the initial case of
4662 being not equal to zero; "out" is leaving it alone. */
4663 if (low == 0 || high == 0
4664 || ! integer_zerop (low) || ! integer_zerop (high)
4665 || TREE_CODE (arg1) != INTEGER_CST)
4666 break;
4668 switch (code)
4670 case NE_EXPR: /* - [c, c] */
4671 low = high = arg1;
4672 break;
4673 case EQ_EXPR: /* + [c, c] */
4674 in_p = ! in_p, low = high = arg1;
4675 break;
4676 case GT_EXPR: /* - [-, c] */
4677 low = 0, high = arg1;
4678 break;
4679 case GE_EXPR: /* + [c, -] */
4680 in_p = ! in_p, low = arg1, high = 0;
4681 break;
4682 case LT_EXPR: /* - [c, -] */
4683 low = arg1, high = 0;
4684 break;
4685 case LE_EXPR: /* + [-, c] */
4686 in_p = ! in_p, low = 0, high = arg1;
4687 break;
4688 default:
4689 gcc_unreachable ();
4692 /* If this is an unsigned comparison, we also know that EXP is
4693 greater than or equal to zero. We base the range tests we make
4694 on that fact, so we record it here so we can parse existing
4695 range tests. We test arg0_type since often the return type
4696 of, e.g. EQ_EXPR, is boolean. */
4697 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4699 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4700 in_p, low, high, 1,
4701 build_int_cst (arg0_type, 0),
4702 NULL_TREE))
4703 break;
4705 in_p = n_in_p, low = n_low, high = n_high;
4707 /* If the high bound is missing, but we have a nonzero low
4708 bound, reverse the range so it goes from zero to the low bound
4709 minus 1. */
4710 if (high == 0 && low && ! integer_zerop (low))
4712 in_p = ! in_p;
4713 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4714 integer_one_node, 0);
4715 low = build_int_cst (arg0_type, 0);
4719 exp = arg0;
4720 continue;
4722 case NEGATE_EXPR:
4723 /* (-x) IN [a,b] -> x in [-b, -a] */
4724 n_low = range_binop (MINUS_EXPR, exp_type,
4725 build_int_cst (exp_type, 0),
4726 0, high, 1);
4727 n_high = range_binop (MINUS_EXPR, exp_type,
4728 build_int_cst (exp_type, 0),
4729 0, low, 0);
4730 low = n_low, high = n_high;
4731 exp = arg0;
4732 continue;
4734 case BIT_NOT_EXPR:
4735 /* ~ X -> -X - 1 */
4736 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4737 build_int_cst (exp_type, 1));
4738 SET_EXPR_LOCATION (exp, loc);
4739 continue;
4741 case PLUS_EXPR: case MINUS_EXPR:
4742 if (TREE_CODE (arg1) != INTEGER_CST)
4743 break;
4745 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4746 move a constant to the other side. */
4747 if (!TYPE_UNSIGNED (arg0_type)
4748 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4749 break;
4751 /* If EXP is signed, any overflow in the computation is undefined,
4752 so we don't worry about it so long as our computations on
4753 the bounds don't overflow. For unsigned, overflow is defined
4754 and this is exactly the right thing. */
4755 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4756 arg0_type, low, 0, arg1, 0);
4757 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4758 arg0_type, high, 1, arg1, 0);
4759 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4760 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4761 break;
4763 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4764 *strict_overflow_p = true;
4766 /* Check for an unsigned range which has wrapped around the maximum
4767 value thus making n_high < n_low, and normalize it. */
4768 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4770 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4771 integer_one_node, 0);
4772 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4773 integer_one_node, 0);
4775 /* If the range is of the form +/- [ x+1, x ], we won't
4776 be able to normalize it. But then, it represents the
4777 whole range or the empty set, so make it
4778 +/- [ -, - ]. */
4779 if (tree_int_cst_equal (n_low, low)
4780 && tree_int_cst_equal (n_high, high))
4781 low = high = 0;
4782 else
4783 in_p = ! in_p;
4785 else
4786 low = n_low, high = n_high;
4788 exp = arg0;
4789 continue;
4791 CASE_CONVERT: case NON_LVALUE_EXPR:
4792 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4793 break;
4795 if (! INTEGRAL_TYPE_P (arg0_type)
4796 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4797 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4798 break;
4800 n_low = low, n_high = high;
4802 if (n_low != 0)
4803 n_low = fold_convert_loc (loc, arg0_type, n_low);
4805 if (n_high != 0)
4806 n_high = fold_convert_loc (loc, arg0_type, n_high);
4809 /* If we're converting arg0, which has an unsigned type, to the
4810 signed type of exp, we will be doing the comparison as unsigned.
4811 The tests above have already verified that LOW and HIGH
4812 are both positive.
4814 So we have to ensure that we will handle large unsigned
4815 values the same way that the current signed bounds treat
4816 negative values. */
4818 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4820 tree high_positive;
4821 tree equiv_type;
4822 /* For fixed-point modes, we need to pass the saturating flag
4823 as the 2nd parameter. */
4824 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4825 equiv_type = lang_hooks.types.type_for_mode
4826 (TYPE_MODE (arg0_type),
4827 TYPE_SATURATING (arg0_type));
4828 else
4829 equiv_type = lang_hooks.types.type_for_mode
4830 (TYPE_MODE (arg0_type), 1);
4832 /* A range without an upper bound is, naturally, unbounded.
4833 Since convert would have cropped a very large value, use
4834 the max value for the destination type. */
4835 high_positive
4836 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4837 : TYPE_MAX_VALUE (arg0_type);
4839 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4840 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4841 fold_convert_loc (loc, arg0_type,
4842 high_positive),
4843 build_int_cst (arg0_type, 1));
4845 /* If the low bound is specified, "and" the range with the
4846 range for which the original unsigned value will be
4847 positive. */
4848 if (low != 0)
4850 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4851 1, n_low, n_high, 1,
4852 fold_convert_loc (loc, arg0_type,
4853 integer_zero_node),
4854 high_positive))
4855 break;
4857 in_p = (n_in_p == in_p);
4859 else
4861 /* Otherwise, "or" the range with the range of the input
4862 that will be interpreted as negative. */
4863 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4864 0, n_low, n_high, 1,
4865 fold_convert_loc (loc, arg0_type,
4866 integer_zero_node),
4867 high_positive))
4868 break;
4870 in_p = (in_p != n_in_p);
4874 exp = arg0;
4875 low = n_low, high = n_high;
4876 continue;
4878 default:
4879 break;
4882 break;
4885 /* If EXP is a constant, we can evaluate whether this is true or false. */
4886 if (TREE_CODE (exp) == INTEGER_CST)
4888 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4889 exp, 0, low, 0))
4890 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4891 exp, 1, high, 1)));
4892 low = high = 0;
4893 exp = 0;
4896 *pin_p = in_p, *plow = low, *phigh = high;
4897 return exp;
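/* Illustrative sketch (not part of the original source): a concrete
   run of the loop above.  For signed X, whose overflow is undefined,
   "x - 1 < 9" is narrowed by the PLUS_EXPR/MINUS_EXPR case to the
   range "x in [-, 9]" (setting *STRICT_OVERFLOW_P), which denotes:  */
static int
example_make_range (int x)
{
  return x <= 9;
}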
4900 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4901 type, TYPE, return an expression to test if EXP is in (or out of, depending
4902 on IN_P) the range. Return 0 if the test couldn't be created. */
4904 tree
4905 build_range_check (location_t loc, tree type, tree exp, int in_p,
4906 tree low, tree high)
4908 tree etype = TREE_TYPE (exp), value;
4910 #ifdef HAVE_canonicalize_funcptr_for_compare
4911 /* Disable this optimization for function pointer expressions
4912 on targets that require function pointer canonicalization. */
4913 if (HAVE_canonicalize_funcptr_for_compare
4914 && TREE_CODE (etype) == POINTER_TYPE
4915 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4916 return NULL_TREE;
4917 #endif
4919 if (! in_p)
4921 value = build_range_check (loc, type, exp, 1, low, high);
4922 if (value != 0)
4923 return invert_truthvalue_loc (loc, value);
4925 return 0;
4928 if (low == 0 && high == 0)
4929 return build_int_cst (type, 1);
4931 if (low == 0)
4932 return fold_build2_loc (loc, LE_EXPR, type, exp,
4933 fold_convert_loc (loc, etype, high));
4935 if (high == 0)
4936 return fold_build2_loc (loc, GE_EXPR, type, exp,
4937 fold_convert_loc (loc, etype, low));
4939 if (operand_equal_p (low, high, 0))
4940 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4941 fold_convert_loc (loc, etype, low));
4943 if (integer_zerop (low))
4945 if (! TYPE_UNSIGNED (etype))
4947 etype = unsigned_type_for (etype);
4948 high = fold_convert_loc (loc, etype, high);
4949 exp = fold_convert_loc (loc, etype, exp);
4951 return build_range_check (loc, type, exp, 1, 0, high);
4954 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4955 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4957 unsigned HOST_WIDE_INT lo;
4958 HOST_WIDE_INT hi;
4959 int prec;
4961 prec = TYPE_PRECISION (etype);
4962 if (prec <= HOST_BITS_PER_WIDE_INT)
4964 hi = 0;
4965 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4967 else
4969 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4970 lo = (unsigned HOST_WIDE_INT) -1;
4973 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4975 if (TYPE_UNSIGNED (etype))
4977 tree signed_etype = signed_type_for (etype);
4978 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4979 etype
4980 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4981 else
4982 etype = signed_etype;
4983 exp = fold_convert_loc (loc, etype, exp);
4985 return fold_build2_loc (loc, GT_EXPR, type, exp,
4986 build_int_cst (etype, 0));
4990 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4991 This requires wrap-around arithmetic for the type of the expression.
4992 First make sure that arithmetic in this type is valid, then make sure
4993 that it wraps around. */
4994 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4995 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4996 TYPE_UNSIGNED (etype));
4998 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
5000 tree utype, minv, maxv;
5002 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5003 for the type in question, as we rely on this here. */
5004 utype = unsigned_type_for (etype);
5005 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
5006 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5007 integer_one_node, 1);
5008 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
5010 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5011 minv, 1, maxv, 1)))
5012 etype = utype;
5013 else
5014 return 0;
5017 high = fold_convert_loc (loc, etype, high);
5018 low = fold_convert_loc (loc, etype, low);
5019 exp = fold_convert_loc (loc, etype, exp);
5021 value = const_binop (MINUS_EXPR, high, low, 0);
5024 if (POINTER_TYPE_P (etype))
5026 if (value != 0 && !TREE_OVERFLOW (value))
5028 low = fold_convert_loc (loc, sizetype, low);
5029 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
5030 return build_range_check (loc, type,
5031 fold_build2_loc (loc, POINTER_PLUS_EXPR,
5032 etype, exp, low),
5033 1, build_int_cst (etype, 0), value);
5035 return 0;
5038 if (value != 0 && !TREE_OVERFLOW (value))
5039 return build_range_check (loc, type,
5040 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5041 1, build_int_cst (etype, 0), value);
5043 return 0;
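/* Illustrative sketch (not part of the original source): the two
   signature rewrites of build_range_check, in source terms.  */
static int
example_check_sign (unsigned char c)
{
  /* "(c >= 1) && (c <= 127)" becomes a sign test:  */
  return (signed char) c > 0;
}

static int
example_check_sub (unsigned int c)
{
  /* "(c >= 10) && (c <= 20)" becomes "c - 10 <= 20 - 10", relying on
     unsigned wraparound to reject values below 10:  */
  return c - 10U <= 10U;
}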
5046 /* Return the predecessor of VAL in its type, handling the infinite case. */
5048 static tree
5049 range_predecessor (tree val)
5051 tree type = TREE_TYPE (val);
5053 if (INTEGRAL_TYPE_P (type)
5054 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5055 return 0;
5056 else
5057 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5060 /* Return the successor of VAL in its type, handling the infinite case. */
5062 static tree
5063 range_successor (tree val)
5065 tree type = TREE_TYPE (val);
5067 if (INTEGRAL_TYPE_P (type)
5068 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5069 return 0;
5070 else
5071 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5074 /* Given two ranges, see if we can merge them into one. Return 1 if we
5075 can, 0 if we can't. Set the output range into the specified parameters. */
5077 bool
5078 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5079 tree high0, int in1_p, tree low1, tree high1)
5081 int no_overlap;
5082 int subset;
5083 int temp;
5084 tree tem;
5085 int in_p;
5086 tree low, high;
5087 int lowequal = ((low0 == 0 && low1 == 0)
5088 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5089 low0, 0, low1, 0)));
5090 int highequal = ((high0 == 0 && high1 == 0)
5091 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5092 high0, 1, high1, 1)));
5094 /* Make range 0 be the range that starts first, or ends last if they
5095 start at the same value. Swap them if it isn't. */
5096 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5097 low0, 0, low1, 0))
5098 || (lowequal
5099 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5100 high1, 1, high0, 1))))
5102 temp = in0_p, in0_p = in1_p, in1_p = temp;
5103 tem = low0, low0 = low1, low1 = tem;
5104 tem = high0, high0 = high1, high1 = tem;
5107 /* Now flag two cases, whether the ranges are disjoint or whether the
5108 second range is totally subsumed in the first. Note that the tests
5109 below are simplified by the ones above. */
5110 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5111 high0, 1, low1, 0));
5112 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5113 high1, 1, high0, 1));
5115 /* We now have four cases, depending on whether we are including or
5116 excluding the two ranges. */
5117 if (in0_p && in1_p)
5119 /* If they don't overlap, the result is false. If the second range
5120 is a subset it is the result. Otherwise, the range is from the start
5121 of the second to the end of the first. */
5122 if (no_overlap)
5123 in_p = 0, low = high = 0;
5124 else if (subset)
5125 in_p = 1, low = low1, high = high1;
5126 else
5127 in_p = 1, low = low1, high = high0;
5130 else if (in0_p && ! in1_p)
5132 /* If they don't overlap, the result is the first range. If they are
5133 equal, the result is false. If the second range is a subset of the
5134 first, and the ranges begin at the same place, we go from just after
5135 the end of the second range to the end of the first. If the second
5136 range is not a subset of the first, or if it is a subset and both
5137 ranges end at the same place, the range starts at the start of the
5138 first range and ends just before the second range.
5139 Otherwise, we can't describe this as a single range. */
5140 if (no_overlap)
5141 in_p = 1, low = low0, high = high0;
5142 else if (lowequal && highequal)
5143 in_p = 0, low = high = 0;
5144 else if (subset && lowequal)
5146 low = range_successor (high1);
5147 high = high0;
5148 in_p = 1;
5149 if (low == 0)
5151 /* We are in the weird situation where high0 > high1 but
5152 high1 has no successor. Punt. */
5153 return 0;
5156 else if (! subset || highequal)
5158 low = low0;
5159 high = range_predecessor (low1);
5160 in_p = 1;
5161 if (high == 0)
5163 /* low0 < low1 but low1 has no predecessor. Punt. */
5164 return 0;
5167 else
5168 return 0;
5171 else if (! in0_p && in1_p)
5173 /* If they don't overlap, the result is the second range. If the second
5174 is a subset of the first, the result is false. Otherwise,
5175 the range starts just after the first range and ends at the
5176 end of the second. */
5177 if (no_overlap)
5178 in_p = 1, low = low1, high = high1;
5179 else if (subset || highequal)
5180 in_p = 0, low = high = 0;
5181 else
5183 low = range_successor (high0);
5184 high = high1;
5185 in_p = 1;
5186 if (low == 0)
5188 /* high1 > high0 but high0 has no successor. Punt. */
5189 return 0;
5194 else
5196 /* The case where we are excluding both ranges. Here the complex case
5197 is if they don't overlap. In that case, the only time we have a
5198 range is if they are adjacent. If the second is a subset of the
5199 first, the result is the first. Otherwise, the range to exclude
5200 starts at the beginning of the first range and ends at the end of the
5201 second. */
5202 if (no_overlap)
5204 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5205 range_successor (high0),
5206 1, low1, 0)))
5207 in_p = 0, low = low0, high = high1;
5208 else
5210 /* Canonicalize - [min, x] into - [-, x]. */
5211 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5212 switch (TREE_CODE (TREE_TYPE (low0)))
5214 case ENUMERAL_TYPE:
5215 if (TYPE_PRECISION (TREE_TYPE (low0))
5216 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5217 break;
5218 /* FALLTHROUGH */
5219 case INTEGER_TYPE:
5220 if (tree_int_cst_equal (low0,
5221 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5222 low0 = 0;
5223 break;
5224 case POINTER_TYPE:
5225 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5226 && integer_zerop (low0))
5227 low0 = 0;
5228 break;
5229 default:
5230 break;
5233 /* Canonicalize - [x, max] into - [x, -]. */
5234 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5235 switch (TREE_CODE (TREE_TYPE (high1)))
5237 case ENUMERAL_TYPE:
5238 if (TYPE_PRECISION (TREE_TYPE (high1))
5239 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5240 break;
5241 /* FALLTHROUGH */
5242 case INTEGER_TYPE:
5243 if (tree_int_cst_equal (high1,
5244 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5245 high1 = 0;
5246 break;
5247 case POINTER_TYPE:
5248 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5249 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5250 high1, 1,
5251 integer_one_node, 1)))
5252 high1 = 0;
5253 break;
5254 default:
5255 break;
5258 /* The ranges might also be adjacent between the maximum and
5259 minimum values of the given type. For
5260 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5261 return + [x + 1, y - 1]. */
5262 if (low0 == 0 && high1 == 0)
5264 low = range_successor (high0);
5265 high = range_predecessor (low1);
5266 if (low == 0 || high == 0)
5267 return 0;
5269 in_p = 1;
5271 else
5272 return 0;
5275 else if (subset)
5276 in_p = 0, low = low0, high = high0;
5277 else
5278 in_p = 0, low = low0, high = high1;
5281 *pin_p = in_p, *plow = low, *phigh = high;
5282 return 1;
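/* Illustrative sketch (not part of the original source): two of the
   merge cases above, in source terms.  */
static int
example_merge_and (int x)
{
  /* "(x >= 2 && x <= 10) && (x >= 5 && x <= 20)": both ranges are
     "in" and overlap partially, so the merged range runs from the
     second low bound to the first high bound: x in [5, 10].  */
  return x >= 5 && x <= 10;
}

static int
example_merge_adjacent (int x)
{
  /* "(x != 3) && (x != 4)": two excluded singletons are adjacent and
     merge into one excluded range, x outside [3, 4], which
     build_range_check then turns into a single unsigned test.  */
  return (unsigned int) x - 3U > 1U;
}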
5286 /* Subroutine of fold, looking inside expressions of the form
5287 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5288 of the COND_EXPR. This function is being used also to optimize
5289 A op B ? C : A, by reversing the comparison first.
5291 Return a folded expression whose code is not a COND_EXPR
5292 anymore, or NULL_TREE if no folding opportunity is found. */
5294 static tree
5295 fold_cond_expr_with_comparison (location_t loc, tree type,
5296 tree arg0, tree arg1, tree arg2)
5298 enum tree_code comp_code = TREE_CODE (arg0);
5299 tree arg00 = TREE_OPERAND (arg0, 0);
5300 tree arg01 = TREE_OPERAND (arg0, 1);
5301 tree arg1_type = TREE_TYPE (arg1);
5302 tree tem;
5304 STRIP_NOPS (arg1);
5305 STRIP_NOPS (arg2);
5307 /* If we have A op 0 ? A : -A, consider applying the following
5308 transformations:
5310 A == 0? A : -A same as -A
5311 A != 0? A : -A same as A
5312 A >= 0? A : -A same as abs (A)
5313 A > 0? A : -A same as abs (A)
5314 A <= 0? A : -A same as -abs (A)
5315 A < 0? A : -A same as -abs (A)
5317 None of these transformations work for modes with signed
5318 zeros. If A is +/-0, the first two transformations will
5319 change the sign of the result (from +0 to -0, or vice
5320 versa). The last four will fix the sign of the result,
5321 even though the original expressions could be positive or
5322 negative, depending on the sign of A.
5324 Note that all these transformations are correct if A is
5325 NaN, since the two alternatives (A and -A) are also NaNs. */
5326 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5327 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5328 ? real_zerop (arg01)
5329 : integer_zerop (arg01))
5330 && ((TREE_CODE (arg2) == NEGATE_EXPR
5331 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5332 /* In the case that A is of the form X-Y, '-A' (arg2) may
5333 have already been folded to Y-X, check for that. */
5334 || (TREE_CODE (arg1) == MINUS_EXPR
5335 && TREE_CODE (arg2) == MINUS_EXPR
5336 && operand_equal_p (TREE_OPERAND (arg1, 0),
5337 TREE_OPERAND (arg2, 1), 0)
5338 && operand_equal_p (TREE_OPERAND (arg1, 1),
5339 TREE_OPERAND (arg2, 0), 0))))
5340 switch (comp_code)
5342 case EQ_EXPR:
5343 case UNEQ_EXPR:
5344 tem = fold_convert_loc (loc, arg1_type, arg1);
5345 return pedantic_non_lvalue_loc (loc,
5346 fold_convert_loc (loc, type,
5347 negate_expr (tem)));
5348 case NE_EXPR:
5349 case LTGT_EXPR:
5350 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5351 case UNGE_EXPR:
5352 case UNGT_EXPR:
5353 if (flag_trapping_math)
5354 break;
5355 /* Fall through. */
5356 case GE_EXPR:
5357 case GT_EXPR:
5358 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5359 arg1 = fold_convert_loc (loc, signed_type_for
5360 (TREE_TYPE (arg1)), arg1);
5361 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5362 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5363 case UNLE_EXPR:
5364 case UNLT_EXPR:
5365 if (flag_trapping_math)
5366 break;
5367 case LE_EXPR:
5368 case LT_EXPR:
5369 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5370 arg1 = fold_convert_loc (loc, signed_type_for
5371 (TREE_TYPE (arg1)), arg1);
5372 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5373 return negate_expr (fold_convert_loc (loc, type, tem));
5374 default:
5375 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5376 break;
5379 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5380 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5381 both transformations are correct when A is NaN: A != 0
5382 is then true, and A == 0 is false. */
5384 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5385 && integer_zerop (arg01) && integer_zerop (arg2))
5387 if (comp_code == NE_EXPR)
5388 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5389 else if (comp_code == EQ_EXPR)
5390 return build_int_cst (type, 0);
5393 /* Try some transformations of A op B ? A : B.
5395 A == B? A : B same as B
5396 A != B? A : B same as A
5397 A >= B? A : B same as max (A, B)
5398 A > B? A : B same as max (B, A)
5399 A <= B? A : B same as min (A, B)
5400 A < B? A : B same as min (B, A)
5402 As above, these transformations don't work in the presence
5403 of signed zeros. For example, if A and B are zeros of
5404 opposite sign, the first two transformations will change
5405 the sign of the result. In the last four, the original
5406 expressions give different results for (A=+0, B=-0) and
5407 (A=-0, B=+0), but the transformed expressions do not.
5409 The first two transformations are correct if either A or B
5410 is a NaN. In the first transformation, the condition will
5411 be false, and B will indeed be chosen. In the case of the
5412 second transformation, the condition A != B will be true,
5413 and A will be chosen.
5415 The conversions to max() and min() are not correct if B is
5416 a number and A is not. The conditions in the original
5417 expressions will be false, so all four give B. The min()
5418 and max() versions would give a NaN instead. */
5419 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5420 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5421 /* Avoid these transformations if the COND_EXPR may be used
5422 as an lvalue in the C++ front-end. PR c++/19199. */
5423 && (in_gimple_form
5424 || (strcmp (lang_hooks.name, "GNU C++") != 0
5425 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5426 || ! maybe_lvalue_p (arg1)
5427 || ! maybe_lvalue_p (arg2)))
5429 tree comp_op0 = arg00;
5430 tree comp_op1 = arg01;
5431 tree comp_type = TREE_TYPE (comp_op0);
5433 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5434 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5436 comp_type = type;
5437 comp_op0 = arg1;
5438 comp_op1 = arg2;
5441 switch (comp_code)
5443 case EQ_EXPR:
5444 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5445 case NE_EXPR:
5446 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5447 case LE_EXPR:
5448 case LT_EXPR:
5449 case UNLE_EXPR:
5450 case UNLT_EXPR:
5451 /* In C++ a ?: expression can be an lvalue, so put the
5452 operand which will be used if they are equal first
5453 so that we can convert this back to the
5454 corresponding COND_EXPR. */
5455 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5457 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5458 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5459 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5460 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5461 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5462 comp_op1, comp_op0);
5463 return pedantic_non_lvalue_loc (loc,
5464 fold_convert_loc (loc, type, tem));
5466 break;
5467 case GE_EXPR:
5468 case GT_EXPR:
5469 case UNGE_EXPR:
5470 case UNGT_EXPR:
5471 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5473 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5474 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5475 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5476 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5477 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5478 comp_op1, comp_op0);
5479 return pedantic_non_lvalue_loc (loc,
5480 fold_convert_loc (loc, type, tem));
5482 break;
5483 case UNEQ_EXPR:
5484 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5485 return pedantic_non_lvalue_loc (loc,
5486 fold_convert_loc (loc, type, arg2));
5487 break;
5488 case LTGT_EXPR:
5489 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5490 return pedantic_non_lvalue_loc (loc,
5491 fold_convert_loc (loc, type, arg1));
5492 break;
5493 default:
5494 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5495 break;
5499 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5500 we might still be able to simplify this. For example,
5501 if C1 is one less or one more than C2, this might have started
5502 out as a MIN or MAX and been transformed by this function.
5503 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5505 if (INTEGRAL_TYPE_P (type)
5506 && TREE_CODE (arg01) == INTEGER_CST
5507 && TREE_CODE (arg2) == INTEGER_CST)
5508 switch (comp_code)
5510 case EQ_EXPR:
5511 if (TREE_CODE (arg1) == INTEGER_CST)
5512 break;
5513 /* We can replace A with C1 in this case. */
5514 arg1 = fold_convert_loc (loc, type, arg01);
5515 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5517 case LT_EXPR:
5518 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5519 MIN_EXPR, to preserve the signedness of the comparison. */
5520 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5521 OEP_ONLY_CONST)
5522 && operand_equal_p (arg01,
5523 const_binop (PLUS_EXPR, arg2,
5524 build_int_cst (type, 1), 0),
5525 OEP_ONLY_CONST))
5527 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5528 fold_convert_loc (loc, TREE_TYPE (arg00),
5529 arg2));
5530 return pedantic_non_lvalue_loc (loc,
5531 fold_convert_loc (loc, type, tem));
5533 break;
5535 case LE_EXPR:
5536 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5537 as above. */
5538 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5539 OEP_ONLY_CONST)
5540 && operand_equal_p (arg01,
5541 const_binop (MINUS_EXPR, arg2,
5542 build_int_cst (type, 1), 0),
5543 OEP_ONLY_CONST))
5545 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5546 fold_convert_loc (loc, TREE_TYPE (arg00),
5547 arg2));
5548 return pedantic_non_lvalue_loc (loc,
5549 fold_convert_loc (loc, type, tem));
5551 break;
5553 case GT_EXPR:
5554 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5555 MAX_EXPR, to preserve the signedness of the comparison. */
5556 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5557 OEP_ONLY_CONST)
5558 && operand_equal_p (arg01,
5559 const_binop (MINUS_EXPR, arg2,
5560 build_int_cst (type, 1), 0),
5561 OEP_ONLY_CONST))
5563 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5564 fold_convert_loc (loc, TREE_TYPE (arg00),
5565 arg2));
5566 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5568 break;
5570 case GE_EXPR:
5571 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5572 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5573 OEP_ONLY_CONST)
5574 && operand_equal_p (arg01,
5575 const_binop (PLUS_EXPR, arg2,
5576 build_int_cst (type, 1), 0),
5577 OEP_ONLY_CONST))
5579 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5580 fold_convert_loc (loc, TREE_TYPE (arg00),
5581 arg2));
5582 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5584 break;
5585 case NE_EXPR:
5586 break;
5587 default:
5588 gcc_unreachable ();
5591 return NULL_TREE;
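/* Illustrative sketch (not part of the original source): the patterns
   recognized above, in source terms.  Each conditional below folds to
   a single ABS_EXPR, MIN_EXPR or MAX_EXPR tree, subject to the
   signed-zero and NaN caveats in the comments.  */
static int
example_cond_abs (int a)
{
  return a >= 0 ? a : -a;	/* folds to ABS_EXPR <a> */
}

static int
example_cond_min (int a, int b)
{
  return a <= b ? a : b;	/* folds to MIN_EXPR <a, b> */
}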
5596 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5597 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5598 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5599 false) >= 2)
5600 #endif
5602 /* EXP is some logical combination of boolean tests. See if we can
5603 merge it into some range test. Return the new tree if so. */
5605 static tree
5606 fold_range_test (location_t loc, enum tree_code code, tree type,
5607 tree op0, tree op1)
5609 int or_op = (code == TRUTH_ORIF_EXPR
5610 || code == TRUTH_OR_EXPR);
5611 int in0_p, in1_p, in_p;
5612 tree low0, low1, low, high0, high1, high;
5613 bool strict_overflow_p = false;
5614 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5615 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5616 tree tem;
5617 const char * const warnmsg = G_("assuming signed overflow does not occur "
5618 "when simplifying range test");
5620 /* If this is an OR operation, invert both sides; we will invert
5621 again at the end. */
5622 if (or_op)
5623 in0_p = ! in0_p, in1_p = ! in1_p;
5625 /* If both expressions are the same, if we can merge the ranges, and we
5626 can build the range test, return it or its inversion. If one of the
5627 ranges is always true or always false, consider it to be the same
5628 expression as the other. */
5629 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5630 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5631 in1_p, low1, high1)
5632 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5633 lhs != 0 ? lhs
5634 : rhs != 0 ? rhs : integer_zero_node,
5635 in_p, low, high))))
5637 if (strict_overflow_p)
5638 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5639 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5642 /* On machines where branches are expensive, if this is a
5643 short-circuited branch and the underlying object on both sides
5644 is the same, make a non-short-circuit operation. */
5645 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5646 && lhs != 0 && rhs != 0
5647 && (code == TRUTH_ANDIF_EXPR
5648 || code == TRUTH_ORIF_EXPR)
5649 && operand_equal_p (lhs, rhs, 0))
5651 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5652 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5653 which cases we can't do this. */
5654 if (simple_operand_p (lhs))
5656 tem = build2 (code == TRUTH_ANDIF_EXPR
5657 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5658 type, op0, op1);
5659 SET_EXPR_LOCATION (tem, loc);
5660 return tem;
5663 else if (lang_hooks.decls.global_bindings_p () == 0
5664 && ! CONTAINS_PLACEHOLDER_P (lhs))
5666 tree common = save_expr (lhs);
5668 if (0 != (lhs = build_range_check (loc, type, common,
5669 or_op ? ! in0_p : in0_p,
5670 low0, high0))
5671 && (0 != (rhs = build_range_check (loc, type, common,
5672 or_op ? ! in1_p : in1_p,
5673 low1, high1))))
5675 if (strict_overflow_p)
5676 fold_overflow_warning (warnmsg,
5677 WARN_STRICT_OVERFLOW_COMPARISON);
5678 tem = build2 (code == TRUTH_ANDIF_EXPR
5679 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5680 type, lhs, rhs);
5681 SET_EXPR_LOCATION (tem, loc);
5682 return tem;
5687 return 0;
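/* An editorial sketch (hypothetical code) of the merge performed by
   fold_range_test: two comparisons against the same operand collapse
   into one range check, typically realized as a single unsigned
   comparison after biasing by the lower bound.  */

static int
example_range_test (unsigned char ch)
{
  /* "ch >= '0' && ch <= '9'" merges into the single range test
     "(unsigned char) (ch - '0') <= 9".  */
  return ch >= '0' && ch <= '9';
}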
5690 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5691 bit value. Arrange things so the extra bits will be set to zero if and
5692 only if C is sign-extended to its full width. If MASK is nonzero,
5693 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5695 static tree
5696 unextend (tree c, int p, int unsignedp, tree mask)
5698 tree type = TREE_TYPE (c);
5699 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5700 tree temp;
5702 if (p == modesize || unsignedp)
5703 return c;
5705 /* We work by getting just the sign bit into the low-order bit, then
5706 into the high-order bit, then sign-extend. We then XOR that value
5707 with C. */
5708 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5709 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5711 /* We must use a signed type in order to get an arithmetic right shift.
5712 However, we must also avoid introducing accidental overflows, so that
5713 a subsequent call to integer_zerop will work. Hence we must
5714 do the type conversion here. At this point, the constant is either
5715 zero or one, and the conversion to a signed type can never overflow.
5716 We could get an overflow if this conversion is done anywhere else. */
5717 if (TYPE_UNSIGNED (type))
5718 temp = fold_convert (signed_type_for (type), temp);
5720 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5721 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5722 if (mask != 0)
5723 temp = const_binop (BIT_AND_EXPR, temp,
5724 fold_convert (TREE_TYPE (c), mask),
5726 /* If necessary, convert the type back to match the type of C. */
5727 if (TYPE_UNSIGNED (type))
5728 temp = fold_convert (type, temp);
5730 return fold_convert (type,
5731 const_binop (BIT_XOR_EXPR, c, temp, 0));
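/* A worked example of unextend (editorial, with assumed values): let
   P = 4 in an 8-bit mode and C = 0xFA, the sign extension of the 4-bit
   value 1010.  The field's sign bit is 1, so after the two shifts TEMP
   is 0xF0 and C ^ TEMP = 0x0A: the extra bits come out zero precisely
   because C was sign-extended.  Starting instead from C = 0x0A, the
   same TEMP gives 0xFA, leaving the extra bits set.  */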
5734 /* Find ways of folding logical expressions of LHS and RHS:
5735 Try to merge two comparisons to the same innermost item.
5736 Look for range tests like "ch >= '0' && ch <= '9'".
5737 Look for combinations of simple terms on machines with expensive branches
5738 and evaluate the RHS unconditionally.
5740 For example, if we have p->a == 2 && p->b == 4 and we can make an
5741 object large enough to span both A and B, we can do this with a comparison
5742 against the object ANDed with a mask.
5744 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5745 operations to do this with one comparison.
5747 We check for both normal comparisons and the BIT_AND_EXPRs made by
5748 this function and the one above.
5750 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5751 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5753 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5754 two operands.
5756 We return the simplified tree or 0 if no optimization is possible. */
5758 static tree
5759 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5760 tree lhs, tree rhs)
5762 /* If this is the "or" of two comparisons, we can do something if
5763 the comparisons are NE_EXPR. If this is the "and", we can do something
5764 if the comparisons are EQ_EXPR. I.e.,
5765 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5767 WANTED_CODE is this operation code. For single bit fields, we can
5768 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5769 comparison for one-bit fields. */
5771 enum tree_code wanted_code;
5772 enum tree_code lcode, rcode;
5773 tree ll_arg, lr_arg, rl_arg, rr_arg;
5774 tree ll_inner, lr_inner, rl_inner, rr_inner;
5775 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5776 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5777 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5778 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5779 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5780 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5781 enum machine_mode lnmode, rnmode;
5782 tree ll_mask, lr_mask, rl_mask, rr_mask;
5783 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5784 tree l_const, r_const;
5785 tree lntype, rntype, result;
5786 HOST_WIDE_INT first_bit, end_bit;
5787 int volatilep;
5788 tree orig_lhs = lhs, orig_rhs = rhs;
5789 enum tree_code orig_code = code;
5791 /* Start by getting the comparison codes. Fail if anything is volatile.
5792 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5793 it were surrounded with a NE_EXPR. */
5795 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5796 return 0;
5798 lcode = TREE_CODE (lhs);
5799 rcode = TREE_CODE (rhs);
5801 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5803 lhs = build2 (NE_EXPR, truth_type, lhs,
5804 build_int_cst (TREE_TYPE (lhs), 0));
5805 lcode = NE_EXPR;
5808 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5810 rhs = build2 (NE_EXPR, truth_type, rhs,
5811 build_int_cst (TREE_TYPE (rhs), 0));
5812 rcode = NE_EXPR;
5815 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5816 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5817 return 0;
5819 ll_arg = TREE_OPERAND (lhs, 0);
5820 lr_arg = TREE_OPERAND (lhs, 1);
5821 rl_arg = TREE_OPERAND (rhs, 0);
5822 rr_arg = TREE_OPERAND (rhs, 1);
5824 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5825 if (simple_operand_p (ll_arg)
5826 && simple_operand_p (lr_arg))
5828 tree result;
5829 if (operand_equal_p (ll_arg, rl_arg, 0)
5830 && operand_equal_p (lr_arg, rr_arg, 0))
5832 result = combine_comparisons (loc, code, lcode, rcode,
5833 truth_type, ll_arg, lr_arg);
5834 if (result)
5835 return result;
5837 else if (operand_equal_p (ll_arg, rr_arg, 0)
5838 && operand_equal_p (lr_arg, rl_arg, 0))
5840 result = combine_comparisons (loc, code, lcode,
5841 swap_tree_comparison (rcode),
5842 truth_type, ll_arg, lr_arg);
5843 if (result)
5844 return result;
5848 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5849 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5851 /* If the RHS can be evaluated unconditionally and its operands are
5852 simple, it wins to evaluate the RHS unconditionally on machines
5853 with expensive branches. In this case, this isn't a comparison
5854 that can be merged. Avoid doing this if the RHS is a floating-point
5855 comparison since those can trap. */
5857 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5858 false) >= 2
5859 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5860 && simple_operand_p (rl_arg)
5861 && simple_operand_p (rr_arg))
5863 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5864 if (code == TRUTH_OR_EXPR
5865 && lcode == NE_EXPR && integer_zerop (lr_arg)
5866 && rcode == NE_EXPR && integer_zerop (rr_arg)
5867 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5868 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5870 result = build2 (NE_EXPR, truth_type,
5871 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5872 ll_arg, rl_arg),
5873 build_int_cst (TREE_TYPE (ll_arg), 0));
5874 goto fold_truthop_exit;
5877 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5878 if (code == TRUTH_AND_EXPR
5879 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5880 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5881 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5882 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5884 result = build2 (EQ_EXPR, truth_type,
5885 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5886 ll_arg, rl_arg),
5887 build_int_cst (TREE_TYPE (ll_arg), 0));
5888 goto fold_truthop_exit;
5891 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5893 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5895 result = build2 (code, truth_type, lhs, rhs);
5896 goto fold_truthop_exit;
5898 return NULL_TREE;
5902 /* See if the comparisons can be merged. Then get all the parameters for
5903 each side. */
5905 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5906 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5907 return 0;
5909 volatilep = 0;
5910 ll_inner = decode_field_reference (loc, ll_arg,
5911 &ll_bitsize, &ll_bitpos, &ll_mode,
5912 &ll_unsignedp, &volatilep, &ll_mask,
5913 &ll_and_mask);
5914 lr_inner = decode_field_reference (loc, lr_arg,
5915 &lr_bitsize, &lr_bitpos, &lr_mode,
5916 &lr_unsignedp, &volatilep, &lr_mask,
5917 &lr_and_mask);
5918 rl_inner = decode_field_reference (loc, rl_arg,
5919 &rl_bitsize, &rl_bitpos, &rl_mode,
5920 &rl_unsignedp, &volatilep, &rl_mask,
5921 &rl_and_mask);
5922 rr_inner = decode_field_reference (loc, rr_arg,
5923 &rr_bitsize, &rr_bitpos, &rr_mode,
5924 &rr_unsignedp, &volatilep, &rr_mask,
5925 &rr_and_mask);
5927 /* The inner operation on the lhs of each
5928 comparison must be the same if we are to be able to do anything.
5929 Then see if we have constants. If not, the same must be true for
5930 the rhs's. */
5931 if (volatilep || ll_inner == 0 || rl_inner == 0
5932 || ! operand_equal_p (ll_inner, rl_inner, 0))
5933 return 0;
5935 if (TREE_CODE (lr_arg) == INTEGER_CST
5936 && TREE_CODE (rr_arg) == INTEGER_CST)
5937 l_const = lr_arg, r_const = rr_arg;
5938 else if (lr_inner == 0 || rr_inner == 0
5939 || ! operand_equal_p (lr_inner, rr_inner, 0))
5940 return 0;
5941 else
5942 l_const = r_const = 0;
5944 /* If either comparison code is not correct for our logical operation,
5945 fail. However, we can convert a one-bit comparison against zero into
5946 the opposite comparison against that bit being set in the field. */
5948 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5949 if (lcode != wanted_code)
5951 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5953 /* Make the left operand unsigned, since we are only interested
5954 in the value of one bit. Otherwise we are doing the wrong
5955 thing below. */
5956 ll_unsignedp = 1;
5957 l_const = ll_mask;
5959 else
5960 return 0;
5963 /* This is analogous to the code for l_const above. */
5964 if (rcode != wanted_code)
5966 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5968 rl_unsignedp = 1;
5969 r_const = rl_mask;
5971 else
5972 return 0;
5975 /* See if we can find a mode that contains both fields being compared on
5976 the left. If we can't, fail. Otherwise, update all constants and masks
5977 to be relative to a field of that size. */
5978 first_bit = MIN (ll_bitpos, rl_bitpos);
5979 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5980 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5981 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5982 volatilep);
5983 if (lnmode == VOIDmode)
5984 return 0;
5986 lnbitsize = GET_MODE_BITSIZE (lnmode);
5987 lnbitpos = first_bit & ~ (lnbitsize - 1);
5988 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5989 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5991 if (BYTES_BIG_ENDIAN)
5993 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5994 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5997 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5998 size_int (xll_bitpos), 0);
5999 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6000 size_int (xrl_bitpos), 0);
6002 if (l_const)
6004 l_const = fold_convert_loc (loc, lntype, l_const);
6005 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6006 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
6007 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6008 fold_build1_loc (loc, BIT_NOT_EXPR,
6009 lntype, ll_mask),
6010 0)))
6012 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6014 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6017 if (r_const)
6019 r_const = fold_convert_loc (loc, lntype, r_const);
6020 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6021 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
6022 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6023 fold_build1_loc (loc, BIT_NOT_EXPR,
6024 lntype, rl_mask),
6025 0)))
6027 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6029 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6033 /* If the right sides are not constant, do the same for them. Also,
6034 disallow this optimization if a size or signedness mismatch occurs
6035 between the left and right sides. */
6036 if (l_const == 0)
6038 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6039 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6040 /* Make sure the two fields on the right
6041 correspond to the left without being swapped. */
6042 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6043 return 0;
6045 first_bit = MIN (lr_bitpos, rr_bitpos);
6046 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6047 rnmode = get_best_mode (end_bit - first_bit, first_bit,
6048 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
6049 volatilep);
6050 if (rnmode == VOIDmode)
6051 return 0;
6053 rnbitsize = GET_MODE_BITSIZE (rnmode);
6054 rnbitpos = first_bit & ~ (rnbitsize - 1);
6055 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6056 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6058 if (BYTES_BIG_ENDIAN)
6060 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6061 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6064 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6065 rntype, lr_mask),
6066 size_int (xlr_bitpos), 0);
6067 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6068 rntype, rr_mask),
6069 size_int (xrr_bitpos), 0);
6071 /* Make a mask that corresponds to both fields being compared.
6072 Do this for both items being compared. If the operands are the
6073 same size and the bits being compared are in the same position
6074 then we can do this by masking both and comparing the masked
6075 results. */
6076 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6077 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6078 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6080 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6081 ll_unsignedp || rl_unsignedp);
6082 if (! all_ones_mask_p (ll_mask, lnbitsize))
6083 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6085 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6086 lr_unsignedp || rr_unsignedp);
6087 if (! all_ones_mask_p (lr_mask, rnbitsize))
6088 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6090 result = build2 (wanted_code, truth_type, lhs, rhs);
6091 goto fold_truthop_exit;
6094 /* There is still another way we can do something: If both pairs of
6095 fields being compared are adjacent, we may be able to make a wider
6096 field containing them both.
6098 Note that we still must mask the lhs/rhs expressions. Furthermore,
6099 the mask must be shifted to account for the shift done by
6100 make_bit_field_ref. */
6101 if ((ll_bitsize + ll_bitpos == rl_bitpos
6102 && lr_bitsize + lr_bitpos == rr_bitpos)
6103 || (ll_bitpos == rl_bitpos + rl_bitsize
6104 && lr_bitpos == rr_bitpos + rr_bitsize))
6106 tree type;
6108 lhs = make_bit_field_ref (loc, ll_inner, lntype,
6109 ll_bitsize + rl_bitsize,
6110 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6111 rhs = make_bit_field_ref (loc, lr_inner, rntype,
6112 lr_bitsize + rr_bitsize,
6113 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6115 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6116 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6117 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6118 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6120 /* Convert to the smaller type before masking out unwanted bits. */
6121 type = lntype;
6122 if (lntype != rntype)
6124 if (lnbitsize > rnbitsize)
6126 lhs = fold_convert_loc (loc, rntype, lhs);
6127 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6128 type = rntype;
6130 else if (lnbitsize < rnbitsize)
6132 rhs = fold_convert_loc (loc, lntype, rhs);
6133 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6134 type = lntype;
6138 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6139 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6141 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6142 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6144 result = build2 (wanted_code, truth_type, lhs, rhs);
6145 goto fold_truthop_exit;
6148 return 0;
6151 /* Handle the case of comparisons with constants. If there is something in
6152 common between the masks, those bits of the constants must be the same.
6153 If not, the condition is always false. Test for this to avoid generating
6154 incorrect code below. */
6155 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6156 if (! integer_zerop (result)
6157 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6158 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6160 if (wanted_code == NE_EXPR)
6162 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6163 return constant_boolean_node (true, truth_type);
6165 else
6167 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6168 return constant_boolean_node (false, truth_type);
6172 /* Construct the expression we will return. First get the component
6173 reference we will make. Unless the mask is all ones the width of
6174 that field, perform the mask operation. Then compare with the
6175 merged constant. */
6176 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6177 ll_unsignedp || rl_unsignedp);
6179 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6180 if (! all_ones_mask_p (ll_mask, lnbitsize))
6182 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6183 SET_EXPR_LOCATION (result, loc);
6186 result = build2 (wanted_code, truth_type, result,
6187 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
6189 fold_truthop_exit:
6190 SET_EXPR_LOCATION (result, loc);
6191 return result;
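/* An editorial sketch (hypothetical types and code) of the merge this
   function performs: two bit-field tests against constants become one
   load, one mask, and one comparison against a merged constant.  */

struct example_flags { unsigned a : 4; unsigned b : 4; };

static int
example_truthop (const struct example_flags *p)
{
  /* Both fields live in the same byte, so "p->a == 2 && p->b == 4"
     may fold into a single masked compare of that byte.  */
  return p->a == 2 && p->b == 4;
}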
6194 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
6195 constant. */
6197 static tree
6198 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6199 tree op0, tree op1)
6201 tree arg0 = op0;
6202 enum tree_code op_code;
6203 tree comp_const;
6204 tree minmax_const;
6205 int consts_equal, consts_lt;
6206 tree inner;
6208 STRIP_SIGN_NOPS (arg0);
6210 op_code = TREE_CODE (arg0);
6211 minmax_const = TREE_OPERAND (arg0, 1);
6212 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6213 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6214 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6215 inner = TREE_OPERAND (arg0, 0);
6217 /* If something does not permit us to optimize, return NULL_TREE. */
6218 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6219 || TREE_CODE (comp_const) != INTEGER_CST
6220 || TREE_OVERFLOW (comp_const)
6221 || TREE_CODE (minmax_const) != INTEGER_CST
6222 || TREE_OVERFLOW (minmax_const))
6223 return NULL_TREE;
6225 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6226 and GT_EXPR, doing the rest with recursive calls using logical
6227 simplifications. */
6228 switch (code)
6230 case NE_EXPR: case LT_EXPR: case LE_EXPR:
6232 tree tem
6233 = optimize_minmax_comparison (loc,
6234 invert_tree_comparison (code, false),
6235 type, op0, op1);
6236 if (tem)
6237 return invert_truthvalue_loc (loc, tem);
6238 return NULL_TREE;
6241 case GE_EXPR:
6242 return
6243 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6244 optimize_minmax_comparison
6245 (loc, EQ_EXPR, type, arg0, comp_const),
6246 optimize_minmax_comparison
6247 (loc, GT_EXPR, type, arg0, comp_const));
6249 case EQ_EXPR:
6250 if (op_code == MAX_EXPR && consts_equal)
6251 /* MAX (X, 0) == 0 -> X <= 0 */
6252 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6254 else if (op_code == MAX_EXPR && consts_lt)
6255 /* MAX (X, 0) == 5 -> X == 5 */
6256 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6258 else if (op_code == MAX_EXPR)
6259 /* MAX (X, 0) == -1 -> false */
6260 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6262 else if (consts_equal)
6263 /* MIN (X, 0) == 0 -> X >= 0 */
6264 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6266 else if (consts_lt)
6267 /* MIN (X, 0) == 5 -> false */
6268 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6270 else
6271 /* MIN (X, 0) == -1 -> X == -1 */
6272 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6274 case GT_EXPR:
6275 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6276 /* MAX (X, 0) > 0 -> X > 0
6277 MAX (X, 0) > 5 -> X > 5 */
6278 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6280 else if (op_code == MAX_EXPR)
6281 /* MAX (X, 0) > -1 -> true */
6282 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6284 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6285 /* MIN (X, 0) > 0 -> false
6286 MIN (X, 0) > 5 -> false */
6287 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6289 else
6290 /* MIN (X, 0) > -1 -> X > -1 */
6291 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6293 default:
6294 return NULL_TREE;
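/* Editorial examples (hypothetical code) of the MIN/MAX comparison
   folds above, written in their equivalent source forms.  */

static int
example_minmax_compare (int x)
{
  int m = x > 0 ? x : 0;	/* MAX (x, 0) */
  /* "MAX (x, 0) == 0" folds to "x <= 0": the maximum is zero exactly
     when x does not exceed zero.  */
  return m == 0;
}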
6298 /* T is an integer expression that is being multiplied, divided, or taken a
6299 modulus (CODE says which and what kind of divide or modulus) by a
6300 constant C. See if we can eliminate that operation by folding it with
6301 other operations already in T. WIDE_TYPE, if non-null, is a type that
6302 should be used for the computation if wider than our type.
6304 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6305 (X * 2) + (Y * 4). We must, however, be assured that either the original
6306 expression would not overflow or that overflow is undefined for the type
6307 in the language in question.
6309 If we return a non-null expression, it is an equivalent form of the
6310 original computation, but need not be in the original type.
6312 We set *STRICT_OVERFLOW_P to true if the return value depends on
6313 signed overflow being undefined. Otherwise we do not change
6314 *STRICT_OVERFLOW_P. */
6316 static tree
6317 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6318 bool *strict_overflow_p)
6320 /* To avoid exponential search depth, refuse to allow recursion past
6321 three levels. Beyond that (1) it's highly unlikely that we'll find
6322 something interesting and (2) we've probably processed it before
6323 when we built the inner expression. */
6325 static int depth;
6326 tree ret;
6328 if (depth > 3)
6329 return NULL;
6331 depth++;
6332 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6333 depth--;
6335 return ret;
6338 static tree
6339 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6340 bool *strict_overflow_p)
6342 tree type = TREE_TYPE (t);
6343 enum tree_code tcode = TREE_CODE (t);
6344 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6345 > GET_MODE_SIZE (TYPE_MODE (type)))
6346 ? wide_type : type);
6347 tree t1, t2;
6348 int same_p = tcode == code;
6349 tree op0 = NULL_TREE, op1 = NULL_TREE;
6350 bool sub_strict_overflow_p;
6352 /* Don't deal with constants of zero here; they confuse the code below. */
6353 if (integer_zerop (c))
6354 return NULL_TREE;
6356 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6357 op0 = TREE_OPERAND (t, 0);
6359 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6360 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6362 /* Note that we need not handle conditional operations here since fold
6363 already handles those cases. So just do arithmetic here. */
6364 switch (tcode)
6366 case INTEGER_CST:
6367 /* For a constant, we can always simplify if we are a multiply
6368 or (for divide and modulus) if it is a multiple of our constant. */
6369 if (code == MULT_EXPR
6370 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6371 return const_binop (code, fold_convert (ctype, t),
6372 fold_convert (ctype, c), 0);
6373 break;
6375 CASE_CONVERT: case NON_LVALUE_EXPR:
6376 /* If op0 is an expression ... */
6377 if ((COMPARISON_CLASS_P (op0)
6378 || UNARY_CLASS_P (op0)
6379 || BINARY_CLASS_P (op0)
6380 || VL_EXP_CLASS_P (op0)
6381 || EXPRESSION_CLASS_P (op0))
6382 /* ... and has wrapping overflow, and its type is smaller
6383 than ctype, then we cannot pass through as widening. */
6384 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6385 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6386 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6387 && (TYPE_PRECISION (ctype)
6388 > TYPE_PRECISION (TREE_TYPE (op0))))
6389 /* ... or this is a truncation (t is narrower than op0),
6390 then we cannot pass through this narrowing. */
6391 || (TYPE_PRECISION (type)
6392 < TYPE_PRECISION (TREE_TYPE (op0)))
6393 /* ... or signedness changes for division or modulus,
6394 then we cannot pass through this conversion. */
6395 || (code != MULT_EXPR
6396 && (TYPE_UNSIGNED (ctype)
6397 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6398 /* ... or has undefined overflow while the converted to
6399 type has not, we cannot do the operation in the inner type
6400 as that would introduce undefined overflow. */
6401 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6402 && !TYPE_OVERFLOW_UNDEFINED (type))))
6403 break;
6405 /* Pass the constant down and see if we can make a simplification. If
6406 we can, replace this expression with the inner simplification for
6407 possible later conversion to our or some other type. */
6408 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6409 && TREE_CODE (t2) == INTEGER_CST
6410 && !TREE_OVERFLOW (t2)
6411 && (0 != (t1 = extract_muldiv (op0, t2, code,
6412 code == MULT_EXPR
6413 ? ctype : NULL_TREE,
6414 strict_overflow_p))))
6415 return t1;
6416 break;
6418 case ABS_EXPR:
6419 /* If widening the type changes it from signed to unsigned, then we
6420 must avoid building ABS_EXPR itself as unsigned. */
6421 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6423 tree cstype = (*signed_type_for) (ctype);
6424 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6425 != 0)
6427 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6428 return fold_convert (ctype, t1);
6430 break;
6432 /* If the constant is negative, we cannot simplify this. */
6433 if (tree_int_cst_sgn (c) == -1)
6434 break;
6435 /* FALLTHROUGH */
6436 case NEGATE_EXPR:
6437 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6438 != 0)
6439 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6440 break;
6442 case MIN_EXPR: case MAX_EXPR:
6443 /* If widening the type changes the signedness, then we can't perform
6444 this optimization as that changes the result. */
6445 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6446 break;
6448 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6449 sub_strict_overflow_p = false;
6450 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6451 &sub_strict_overflow_p)) != 0
6452 && (t2 = extract_muldiv (op1, c, code, wide_type,
6453 &sub_strict_overflow_p)) != 0)
6455 if (tree_int_cst_sgn (c) < 0)
6456 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6457 if (sub_strict_overflow_p)
6458 *strict_overflow_p = true;
6459 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6460 fold_convert (ctype, t2));
6462 break;
6464 case LSHIFT_EXPR: case RSHIFT_EXPR:
6465 /* If the second operand is constant, this is a multiplication
6466 or floor division, by a power of two, so we can treat it that
6467 way unless the multiplier or divisor overflows. Signed
6468 left-shift overflow is implementation-defined rather than
6469 undefined in C90, so do not convert signed left shift into
6470 multiplication. */
6471 if (TREE_CODE (op1) == INTEGER_CST
6472 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6473 /* const_binop may not detect overflow correctly,
6474 so check for it explicitly here. */
6475 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6476 && TREE_INT_CST_HIGH (op1) == 0
6477 && 0 != (t1 = fold_convert (ctype,
6478 const_binop (LSHIFT_EXPR,
6479 size_one_node,
6480 op1, 0)))
6481 && !TREE_OVERFLOW (t1))
6482 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6483 ? MULT_EXPR : FLOOR_DIV_EXPR,
6484 ctype,
6485 fold_convert (ctype, op0),
6486 t1),
6487 c, code, wide_type, strict_overflow_p);
6488 break;
6490 case PLUS_EXPR: case MINUS_EXPR:
6491 /* See if we can eliminate the operation on both sides. If we can, we
6492 can return a new PLUS or MINUS. If we can't, the only remaining
6493 cases where we can do anything are if the second operand is a
6494 constant. */
6495 sub_strict_overflow_p = false;
6496 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6497 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6498 if (t1 != 0 && t2 != 0
6499 && (code == MULT_EXPR
6500 /* If not multiplication, we can only do this if both operands
6501 are divisible by c. */
6502 || (multiple_of_p (ctype, op0, c)
6503 && multiple_of_p (ctype, op1, c))))
6505 if (sub_strict_overflow_p)
6506 *strict_overflow_p = true;
6507 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6508 fold_convert (ctype, t2));
6511 /* If this was a subtraction, negate OP1 and set it to be an addition.
6512 This simplifies the logic below. */
6513 if (tcode == MINUS_EXPR)
6515 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6516 /* If OP1 was not easily negatable, the constant may be OP0. */
6517 if (TREE_CODE (op0) == INTEGER_CST)
6519 tree tem = op0;
6520 op0 = op1;
6521 op1 = tem;
6522 tem = t1;
6523 t1 = t2;
6524 t2 = tem;
6528 if (TREE_CODE (op1) != INTEGER_CST)
6529 break;
6531 /* If either OP1 or C are negative, this optimization is not safe for
6532 some of the division and remainder types while for others we need
6533 to change the code. */
6534 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6536 if (code == CEIL_DIV_EXPR)
6537 code = FLOOR_DIV_EXPR;
6538 else if (code == FLOOR_DIV_EXPR)
6539 code = CEIL_DIV_EXPR;
6540 else if (code != MULT_EXPR
6541 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6542 break;
6545 /* If it's a multiply or a division/modulus operation of a multiple
6546 of our constant, do the operation and verify it doesn't overflow. */
6547 if (code == MULT_EXPR
6548 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6550 op1 = const_binop (code, fold_convert (ctype, op1),
6551 fold_convert (ctype, c), 0);
6552 /* We allow the constant to overflow with wrapping semantics. */
6553 if (op1 == 0
6554 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6555 break;
6557 else
6558 break;
6560 /* If we have an unsigned type that is not a sizetype, we cannot widen
6561 the operation since it will change the result if the original
6562 computation overflowed. */
6563 if (TYPE_UNSIGNED (ctype)
6564 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6565 && ctype != type)
6566 break;
6568 /* If we were able to eliminate our operation from the first side,
6569 apply our operation to the second side and reform the PLUS. */
6570 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6571 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6573 /* The last case is if we are a multiply. In that case, we can
6574 apply the distributive law to commute the multiply and addition
6575 if the multiplication of the constants doesn't overflow. */
6576 if (code == MULT_EXPR)
6577 return fold_build2 (tcode, ctype,
6578 fold_build2 (code, ctype,
6579 fold_convert (ctype, op0),
6580 fold_convert (ctype, c)),
6581 op1);
6583 break;
6585 case MULT_EXPR:
6586 /* We have a special case here if we are doing something like
6587 (C * 8) % 4 since we know that's zero. */
6588 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6589 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6590 /* If the multiplication can overflow we cannot optimize this.
6591 ??? Until we can properly mark individual operations as
6592 not overflowing we need to treat sizetype specially here, as
6593 stor-layout relies on this optimization to make
6594 DECL_FIELD_BIT_OFFSET always a constant. */
6595 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6596 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6597 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6598 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6599 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6601 *strict_overflow_p = true;
6602 return omit_one_operand (type, integer_zero_node, op0);
6605 /* ... fall through ... */
6607 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6608 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6609 /* If we can extract our operation from the LHS, do so and return a
6610 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6611 do something only if the second operand is a constant. */
6612 if (same_p
6613 && (t1 = extract_muldiv (op0, c, code, wide_type,
6614 strict_overflow_p)) != 0)
6615 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6616 fold_convert (ctype, op1));
6617 else if (tcode == MULT_EXPR && code == MULT_EXPR
6618 && (t1 = extract_muldiv (op1, c, code, wide_type,
6619 strict_overflow_p)) != 0)
6620 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6621 fold_convert (ctype, t1));
6622 else if (TREE_CODE (op1) != INTEGER_CST)
6623 return 0;
6625 /* If these are the same operation types, we can associate them
6626 assuming no overflow. */
6627 if (tcode == code
6628 && 0 != (t1 = int_const_binop (MULT_EXPR,
6629 fold_convert (ctype, op1),
6630 fold_convert (ctype, c), 1))
6631 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6632 TREE_INT_CST_HIGH (t1),
6633 (TYPE_UNSIGNED (ctype)
6634 && tcode != MULT_EXPR) ? -1 : 1,
6635 TREE_OVERFLOW (t1)))
6636 && !TREE_OVERFLOW (t1))
6637 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6639 /* If these operations "cancel" each other, we have the main
6640 optimizations of this pass, which occur when either constant is a
6641 multiple of the other, in which case we replace this with either an
6642 operation of CODE or TCODE.
6644 If we have an unsigned type that is not a sizetype, we cannot do
6645 this since it will change the result if the original computation
6646 overflowed. */
6647 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6648 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6649 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6650 || (tcode == MULT_EXPR
6651 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6652 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6653 && code != MULT_EXPR)))
6655 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6657 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6658 *strict_overflow_p = true;
6659 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6660 fold_convert (ctype,
6661 const_binop (TRUNC_DIV_EXPR,
6662 op1, c, 0)));
6664 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6666 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6667 *strict_overflow_p = true;
6668 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6669 fold_convert (ctype,
6670 const_binop (TRUNC_DIV_EXPR,
6671 c, op1, 0)));
6674 break;
6676 default:
6677 break;
6680 return 0;
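/* An editorial sketch (hypothetical code) of extract_muldiv in action:
   dividing a sum of multiples by a common factor distributes the
   division, provided the original expression cannot overflow or signed
   overflow is undefined for the type.  */

static int
example_extract_muldiv (int x, int y)
{
  /* Matches the example in the comment above: (x * 8 + y * 16) / 4
     may be rewritten as x * 2 + y * 4, eliminating the division.  */
  return (x * 8 + y * 16) / 4;
}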
6683 /* Return a node which has the indicated constant VALUE (either 0 or
6684 1), and is of the indicated TYPE. */
6686 tree
6687 constant_boolean_node (int value, tree type)
6689 if (type == integer_type_node)
6690 return value ? integer_one_node : integer_zero_node;
6691 else if (type == boolean_type_node)
6692 return value ? boolean_true_node : boolean_false_node;
6693 else
6694 return build_int_cst (type, value);
6698 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6699 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6700 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6701 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6702 COND is the first argument to CODE; otherwise (as in the example
6703 given here), it is the second argument. TYPE is the type of the
6704 original expression. Return NULL_TREE if no simplification is
6705 possible. */
6707 static tree
6708 fold_binary_op_with_conditional_arg (location_t loc,
6709 enum tree_code code,
6710 tree type, tree op0, tree op1,
6711 tree cond, tree arg, int cond_first_p)
6713 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6714 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6715 tree test, true_value, false_value;
6716 tree lhs = NULL_TREE;
6717 tree rhs = NULL_TREE;
6719 /* This transformation is only worthwhile if we don't have to wrap
6720 arg in a SAVE_EXPR, and the operation can be simplified on at least
6721 one of the branches once it is pushed inside the COND_EXPR. */
6722 if (!TREE_CONSTANT (arg))
6723 return NULL_TREE;
6725 if (TREE_CODE (cond) == COND_EXPR)
6727 test = TREE_OPERAND (cond, 0);
6728 true_value = TREE_OPERAND (cond, 1);
6729 false_value = TREE_OPERAND (cond, 2);
6730 /* If this operand throws an exception, then it does not make
6731 sense to try to perform a logical or arithmetic operation
6732 involving it. */
6733 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6734 lhs = true_value;
6735 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6736 rhs = false_value;
6738 else
6740 tree testtype = TREE_TYPE (cond);
6741 test = cond;
6742 true_value = constant_boolean_node (true, testtype);
6743 false_value = constant_boolean_node (false, testtype);
6746 arg = fold_convert_loc (loc, arg_type, arg);
6747 if (lhs == 0)
6749 true_value = fold_convert_loc (loc, cond_type, true_value);
6750 if (cond_first_p)
6751 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6752 else
6753 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6755 if (rhs == 0)
6757 false_value = fold_convert_loc (loc, cond_type, false_value);
6758 if (cond_first_p)
6759 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6760 else
6761 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6764 test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6765 return fold_convert_loc (loc, type, test);
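/* An editorial example (hypothetical code) of the transformation above:
   the constant operand is pushed into both arms of the conditional,
   where each arm can then fold to a constant.  */

static int
example_cond_arg (int b)
{
  /* "2 + (b ? 3 : 5)" becomes "b ? (2 + 3) : (2 + 5)", i.e.
     "b ? 5 : 7", leaving no addition at run time.  */
  return 2 + (b ? 3 : 5);
}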
6769 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6771 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6772 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6773 ADDEND is the same as X.
6775 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6776 and finite. The problematic cases are when X is zero, and its mode
6777 has signed zeros. In the case of rounding towards -infinity,
6778 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6779 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6781 bool
6782 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6784 if (!real_zerop (addend))
6785 return false;
6787 /* Don't allow the fold with -fsignaling-nans. */
6788 if (HONOR_SNANS (TYPE_MODE (type)))
6789 return false;
6791 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6792 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6793 return true;
6795 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6796 if (TREE_CODE (addend) == REAL_CST
6797 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6798 negate = !negate;
6800 /* The mode has signed zeros, and we have to honor their sign.
6801 In this situation, there is only one case we can return true for.
6802 X - 0 is the same as X unless rounding towards -infinity is
6803 supported. */
6804 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
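/* An editorial illustration (hypothetical code) of the constraint above:
   with signed zeros honored, "x + 0.0" is not "x", because
   (-0.0) + 0.0 is +0.0 under the default rounding mode.  */

static double
example_zero_addition (double x)
{
  /* "x - 0.0" may fold to "x" unless rounding towards -infinity is
     honored; "x + 0.0" may not, since x could be -0.0.  */
  return x - 0.0;
}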
6807 /* Subroutine of fold() that checks comparisons of built-in math
6808 functions against real constants.
6810 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6811 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6812 is the type of the result and ARG0 and ARG1 are the operands of the
6813 comparison. ARG1 must be a TREE_REAL_CST.
6815 The function returns the constant folded tree if a simplification
6816 can be made, and NULL_TREE otherwise. */
6818 static tree
6819 fold_mathfn_compare (location_t loc,
6820 enum built_in_function fcode, enum tree_code code,
6821 tree type, tree arg0, tree arg1)
6823 REAL_VALUE_TYPE c;
6825 if (BUILTIN_SQRT_P (fcode))
6827 tree arg = CALL_EXPR_ARG (arg0, 0);
6828 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6830 c = TREE_REAL_CST (arg1);
6831 if (REAL_VALUE_NEGATIVE (c))
6833 /* sqrt(x) < y is always false, if y is negative. */
6834 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6835 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6837 /* sqrt(x) > y is always true, if y is negative and we
6838 don't care about NaNs, i.e. negative values of x. */
6839 if (code == NE_EXPR || !HONOR_NANS (mode))
6840 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6842 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6843 return fold_build2_loc (loc, GE_EXPR, type, arg,
6844 build_real (TREE_TYPE (arg), dconst0));
6846 else if (code == GT_EXPR || code == GE_EXPR)
6848 REAL_VALUE_TYPE c2;
6850 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6851 real_convert (&c2, mode, &c2);
6853 if (REAL_VALUE_ISINF (c2))
6855 /* sqrt(x) > y is x == +Inf, when y is very large. */
6856 if (HONOR_INFINITIES (mode))
6857 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6858 build_real (TREE_TYPE (arg), c2));
6860 /* sqrt(x) > y is always false, when y is very large
6861 and we don't care about infinities. */
6862 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6865 /* sqrt(x) > c is the same as x > c*c. */
6866 return fold_build2_loc (loc, code, type, arg,
6867 build_real (TREE_TYPE (arg), c2));
6869 else if (code == LT_EXPR || code == LE_EXPR)
6871 REAL_VALUE_TYPE c2;
6873 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6874 real_convert (&c2, mode, &c2);
6876 if (REAL_VALUE_ISINF (c2))
6878 /* sqrt(x) < y is always true, when y is a very large
6879 value and we don't care about NaNs or Infinities. */
6880 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6881 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6883 /* sqrt(x) < y is x != +Inf when y is very large and we
6884 don't care about NaNs. */
6885 if (! HONOR_NANS (mode))
6886 return fold_build2_loc (loc, NE_EXPR, type, arg,
6887 build_real (TREE_TYPE (arg), c2));
6889 /* sqrt(x) < y is x >= 0 when y is very large and we
6890 don't care about Infinities. */
6891 if (! HONOR_INFINITIES (mode))
6892 return fold_build2_loc (loc, GE_EXPR, type, arg,
6893 build_real (TREE_TYPE (arg), dconst0));
6895 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6896 if (lang_hooks.decls.global_bindings_p () != 0
6897 || CONTAINS_PLACEHOLDER_P (arg))
6898 return NULL_TREE;
6900 arg = save_expr (arg);
6901 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6902 fold_build2_loc (loc, GE_EXPR, type, arg,
6903 build_real (TREE_TYPE (arg),
6904 dconst0)),
6905 fold_build2_loc (loc, NE_EXPR, type, arg,
6906 build_real (TREE_TYPE (arg),
6907 c2)));
6910 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6911 if (! HONOR_NANS (mode))
6912 return fold_build2_loc (loc, code, type, arg,
6913 build_real (TREE_TYPE (arg), c2));
6915 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6916 if (lang_hooks.decls.global_bindings_p () == 0
6917 && ! CONTAINS_PLACEHOLDER_P (arg))
6919 arg = save_expr (arg);
6920 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6921 fold_build2_loc (loc, GE_EXPR, type, arg,
6922 build_real (TREE_TYPE (arg),
6923 dconst0)),
6924 fold_build2_loc (loc, code, type, arg,
6925 build_real (TREE_TYPE (arg),
6926 c2)));
6931 return NULL_TREE;
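/* An editorial sketch (hypothetical code) of the sqrt comparison folds:
   squaring the constant moves the test off the call.  */

#include <math.h>	/* For the sketch only: sqrt.  */

static int
example_sqrt_compare (double x)
{
  /* Ignoring NaNs, "sqrt (x) > 2.0" folds to "x > 4.0"; against a
     negative bound, "sqrt (x) > -1.0" folds to "x >= 0.0".  */
  return sqrt (x) > 2.0;
}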
6934 /* Subroutine of fold() that optimizes comparisons against Infinities,
6935 either +Inf or -Inf.
6937 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6938 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6939 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6941 The function returns the constant folded tree if a simplification
6942 can be made, and NULL_TREE otherwise. */
6944 static tree
6945 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6946 tree arg0, tree arg1)
6948 enum machine_mode mode;
6949 REAL_VALUE_TYPE max;
6950 tree temp;
6951 bool neg;
6953 mode = TYPE_MODE (TREE_TYPE (arg0));
6955 /* For negative infinity swap the sense of the comparison. */
6956 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6957 if (neg)
6958 code = swap_tree_comparison (code);
6960 switch (code)
6962 case GT_EXPR:
6963 /* x > +Inf is always false, if we ignore sNaNs. */
6964 if (HONOR_SNANS (mode))
6965 return NULL_TREE;
6966 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6968 case LE_EXPR:
6969 /* x <= +Inf is always true, if we don't care about NaNs. */
6970 if (! HONOR_NANS (mode))
6971 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6973 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6974 if (lang_hooks.decls.global_bindings_p () == 0
6975 && ! CONTAINS_PLACEHOLDER_P (arg0))
6977 arg0 = save_expr (arg0);
6978 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6980 break;
6982 case EQ_EXPR:
6983 case GE_EXPR:
6984 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6985 real_maxval (&max, neg, mode);
6986 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6987 arg0, build_real (TREE_TYPE (arg0), max));
6989 case LT_EXPR:
6990 /* x < +Inf is always equal to x <= DBL_MAX. */
6991 real_maxval (&max, neg, mode);
6992 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6993 arg0, build_real (TREE_TYPE (arg0), max));
6995 case NE_EXPR:
6996 /* x != +Inf is always equal to !(x > DBL_MAX). */
6997 real_maxval (&max, neg, mode);
6998 if (! HONOR_NANS (mode))
6999 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7000 arg0, build_real (TREE_TYPE (arg0), max));
7002 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7003 arg0, build_real (TREE_TYPE (arg0), max));
7004 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
7006 default:
7007 break;
7010 return NULL_TREE;
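/* Editorial examples (hypothetical code) of the infinity comparison
   folds: tests against +Inf become tests against the largest finite
   value of the mode.  */

#include <math.h>	/* For the sketch only: INFINITY.  */

static int
example_inf_compare (double x)
{
  /* "x < INFINITY" folds to "x <= DBL_MAX"; dually, "x >= INFINITY"
     becomes "x > DBL_MAX".  */
  return x < INFINITY;
}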
7013 /* Subroutine of fold() that optimizes comparisons of a division by
7014 a nonzero integer constant against an integer constant, i.e.
7015 X/C1 op C2.
7017 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7018 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
7019 are the operands of the comparison. ARG1 must be an INTEGER_CST.
7021 The function returns the constant folded tree if a simplification
7022 can be made, and NULL_TREE otherwise. */
7024 static tree
7025 fold_div_compare (location_t loc,
7026 enum tree_code code, tree type, tree arg0, tree arg1)
7028 tree prod, tmp, hi, lo;
7029 tree arg00 = TREE_OPERAND (arg0, 0);
7030 tree arg01 = TREE_OPERAND (arg0, 1);
7031 unsigned HOST_WIDE_INT lpart;
7032 HOST_WIDE_INT hpart;
7033 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
7034 bool neg_overflow;
7035 int overflow;
7037 /* We have to do this the hard way to detect unsigned overflow.
7038 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
7039 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
7040 TREE_INT_CST_HIGH (arg01),
7041 TREE_INT_CST_LOW (arg1),
7042 TREE_INT_CST_HIGH (arg1),
7043 &lpart, &hpart, unsigned_p);
7044 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7045 -1, overflow);
7046 neg_overflow = false;
7048 if (unsigned_p)
7050 tmp = int_const_binop (MINUS_EXPR, arg01,
7051 build_int_cst (TREE_TYPE (arg01), 1), 0);
7052 lo = prod;
7054 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
7055 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
7056 TREE_INT_CST_HIGH (prod),
7057 TREE_INT_CST_LOW (tmp),
7058 TREE_INT_CST_HIGH (tmp),
7059 &lpart, &hpart, unsigned_p);
7060 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7061 -1, overflow | TREE_OVERFLOW (prod));
7063 else if (tree_int_cst_sgn (arg01) >= 0)
7065 tmp = int_const_binop (MINUS_EXPR, arg01,
7066 build_int_cst (TREE_TYPE (arg01), 1), 0);
7067 switch (tree_int_cst_sgn (arg1))
7069 case -1:
7070 neg_overflow = true;
7071 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7072 hi = prod;
7073 break;
7075 case 0:
7076 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
7077 hi = tmp;
7078 break;
7080 case 1:
7081 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7082 lo = prod;
7083 break;
7085 default:
7086 gcc_unreachable ();
7089 else
7091 /* A negative divisor reverses the relational operators. */
7092 code = swap_tree_comparison (code);
7094 tmp = int_const_binop (PLUS_EXPR, arg01,
7095 build_int_cst (TREE_TYPE (arg01), 1), 0);
7096 switch (tree_int_cst_sgn (arg1))
7098 case -1:
7099 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7100 lo = prod;
7101 break;
7103 case 0:
7104 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7105 lo = tmp;
7106 break;
7108 case 1:
7109 neg_overflow = true;
7110 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7111 hi = prod;
7112 break;
7114 default:
7115 gcc_unreachable ();
7119 switch (code)
7121 case EQ_EXPR:
7122 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7123 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7124 if (TREE_OVERFLOW (hi))
7125 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7126 if (TREE_OVERFLOW (lo))
7127 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7128 return build_range_check (loc, type, arg00, 1, lo, hi);
7130 case NE_EXPR:
7131 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7132 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7133 if (TREE_OVERFLOW (hi))
7134 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7135 if (TREE_OVERFLOW (lo))
7136 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7137 return build_range_check (loc, type, arg00, 0, lo, hi);
7139 case LT_EXPR:
7140 if (TREE_OVERFLOW (lo))
7142 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7143 return omit_one_operand_loc (loc, type, tmp, arg00);
7145 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7147 case LE_EXPR:
7148 if (TREE_OVERFLOW (hi))
7150 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7151 return omit_one_operand_loc (loc, type, tmp, arg00);
7153 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7155 case GT_EXPR:
7156 if (TREE_OVERFLOW (hi))
7158 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7159 return omit_one_operand_loc (loc, type, tmp, arg00);
7161 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7163 case GE_EXPR:
7164 if (TREE_OVERFLOW (lo))
7166 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7167 return omit_one_operand_loc (loc, type, tmp, arg00);
7169 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7171 default:
7172 break;
7175 return NULL_TREE;
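/* An editorial sketch (hypothetical code) of the fold above: for
   positive C1 and C2, X / C1 == C2 holds exactly when X lies in
   [C1*C2, C1*C2 + C1 - 1], so the division becomes a range check.  */

static int
example_div_compare (int x)
{
  /* "x / 3 == 2" folds to the range test "6 <= x && x <= 8".  */
  return x / 3 == 2;
}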
7179 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7180 equality/inequality test, then return a simplified form of the test
7181 using a sign test. Otherwise return NULL. TYPE is the desired
7182 result type. */
7184 static tree
7185 fold_single_bit_test_into_sign_test (location_t loc,
7186 enum tree_code code, tree arg0, tree arg1,
7187 tree result_type)
7189 /* If this is testing a single bit, we can optimize the test. */
7190 if ((code == NE_EXPR || code == EQ_EXPR)
7191 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7192 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7194 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7195 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7196 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7198 if (arg00 != NULL_TREE
7199 /* This is only a win if casting to a signed type is cheap,
7200 i.e. when arg00's type is not a partial mode. */
7201 && TYPE_PRECISION (TREE_TYPE (arg00))
7202 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7204 tree stype = signed_type_for (TREE_TYPE (arg00));
7205 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7206 result_type,
7207 fold_convert_loc (loc, stype, arg00),
7208 build_int_cst (stype, 0));
7212 return NULL_TREE;
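/* An editorial example (hypothetical code) of the rewrite above:
   masking with the sign bit becomes a signed comparison with zero.  */

static int
example_sign_bit_test (unsigned char x)
{
  /* 0x80 is the sign bit of an 8-bit value, so "(x & 0x80) != 0"
     folds to "(signed char) x < 0".  */
  return (x & 0x80) != 0;
}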
7215 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7216 equality/inequality test, then return a simplified form of
7217 the test using shifts and logical operations. Otherwise return
7218 NULL. TYPE is the desired result type. */
7220 tree
7221 fold_single_bit_test (location_t loc, enum tree_code code,
7222 tree arg0, tree arg1, tree result_type)
7224 /* If this is testing a single bit, we can optimize the test. */
7225 if ((code == NE_EXPR || code == EQ_EXPR)
7226 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7227 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7229 tree inner = TREE_OPERAND (arg0, 0);
7230 tree type = TREE_TYPE (arg0);
7231 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7232 enum machine_mode operand_mode = TYPE_MODE (type);
7233 int ops_unsigned;
7234 tree signed_type, unsigned_type, intermediate_type;
7235 tree tem, one;
7237 /* First, see if we can fold the single bit test into a sign-bit
7238 test. */
7239 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7240 result_type);
7241 if (tem)
7242 return tem;
7244 /* Otherwise we have (A & C) != 0 where C is a single bit,
7245 convert that into ((A >> C2) & 1), where C2 = log2(C).
7246 Similarly for (A & C) == 0. */
7248 /* If INNER is a right shift of a constant and it plus BITNUM does
7249 not overflow, adjust BITNUM and INNER. */
7250 if (TREE_CODE (inner) == RSHIFT_EXPR
7251 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7252 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7253 && bitnum < TYPE_PRECISION (type)
7254 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7255 bitnum - TYPE_PRECISION (type)))
7257 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7258 inner = TREE_OPERAND (inner, 0);
7261 /* If we are going to be able to omit the AND below, we must do our
7262 operations as unsigned. If we must use the AND, we have a choice.
7263 Normally unsigned is faster, but for some machines signed is. */
7264 #ifdef LOAD_EXTEND_OP
7265 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7266 && !flag_syntax_only) ? 0 : 1;
7267 #else
7268 ops_unsigned = 1;
7269 #endif
7271 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7272 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7273 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7274 inner = fold_convert_loc (loc, intermediate_type, inner);
7276 if (bitnum != 0)
7277 inner = build2 (RSHIFT_EXPR, intermediate_type,
7278 inner, size_int (bitnum));
7280 one = build_int_cst (intermediate_type, 1);
7282 if (code == EQ_EXPR)
7283 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7285 /* Put the AND last so it can combine with more things. */
7286 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7288 /* Make sure to return the proper type. */
7289 inner = fold_convert_loc (loc, result_type, inner);
7291 return inner;
7293 return NULL_TREE;
7296 /* Check whether we are allowed to reorder operands arg0 and arg1,
7297 such that the evaluation of arg1 occurs before arg0. */
7299 static bool
7300 reorder_operands_p (const_tree arg0, const_tree arg1)
7302 if (! flag_evaluation_order)
7303 return true;
7304 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7305 return true;
7306 return ! TREE_SIDE_EFFECTS (arg0)
7307 && ! TREE_SIDE_EFFECTS (arg1);
7310 /* Test whether it is preferable to swap two operands, ARG0 and
7311 ARG1, for example because ARG0 is an integer constant and ARG1
7312 isn't. If REORDER is true, only recommend swapping if we can
7313 evaluate the operands in reverse order. */
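/* For example, for 5 + X this returns true because the INTEGER_CST
   should come last, so callers canonicalize the sum to X + 5;
   SSA_NAMEs and variables are likewise pushed towards the end. */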
7315 bool
7316 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7318 STRIP_SIGN_NOPS (arg0);
7319 STRIP_SIGN_NOPS (arg1);
7321 if (TREE_CODE (arg1) == INTEGER_CST)
7322 return 0;
7323 if (TREE_CODE (arg0) == INTEGER_CST)
7324 return 1;
7326 if (TREE_CODE (arg1) == REAL_CST)
7327 return 0;
7328 if (TREE_CODE (arg0) == REAL_CST)
7329 return 1;
7331 if (TREE_CODE (arg1) == FIXED_CST)
7332 return 0;
7333 if (TREE_CODE (arg0) == FIXED_CST)
7334 return 1;
7336 if (TREE_CODE (arg1) == COMPLEX_CST)
7337 return 0;
7338 if (TREE_CODE (arg0) == COMPLEX_CST)
7339 return 1;
7341 if (TREE_CONSTANT (arg1))
7342 return 0;
7343 if (TREE_CONSTANT (arg0))
7344 return 1;
7346 if (optimize_function_for_size_p (cfun))
7347 return 0;
7349 if (reorder && flag_evaluation_order
7350 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7351 return 0;
7353 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7354 for commutative and comparison operators. Ensuring a canonical
7355 form allows the optimizers to find additional redundancies without
7356 having to explicitly check for both orderings. */
7357 if (TREE_CODE (arg0) == SSA_NAME
7358 && TREE_CODE (arg1) == SSA_NAME
7359 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7360 return 1;
7362 /* Put SSA_NAMEs last. */
7363 if (TREE_CODE (arg1) == SSA_NAME)
7364 return 0;
7365 if (TREE_CODE (arg0) == SSA_NAME)
7366 return 1;
7368 /* Put variables last. */
7369 if (DECL_P (arg1))
7370 return 0;
7371 if (DECL_P (arg0))
7372 return 1;
7374 return 0;
7377 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7378 ARG0 is extended to a wider type. */
7380 static tree
7381 fold_widened_comparison (location_t loc, enum tree_code code,
7382 tree type, tree arg0, tree arg1)
7384 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7385 tree arg1_unw;
7386 tree shorter_type, outer_type;
7387 tree min, max;
7388 bool above, below;
7390 if (arg0_unw == arg0)
7391 return NULL_TREE;
7392 shorter_type = TREE_TYPE (arg0_unw);
7394 #ifdef HAVE_canonicalize_funcptr_for_compare
7395 /* Disable this optimization if we're casting a function pointer
7396 type on targets that require function pointer canonicalization. */
7397 if (HAVE_canonicalize_funcptr_for_compare
7398 && TREE_CODE (shorter_type) == POINTER_TYPE
7399 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7400 return NULL_TREE;
7401 #endif
7403 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7404 return NULL_TREE;
7406 arg1_unw = get_unwidened (arg1, NULL_TREE);
7408 /* If possible, express the comparison in the shorter mode. */
7409 if ((code == EQ_EXPR || code == NE_EXPR
7410 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7411 && (TREE_TYPE (arg1_unw) == shorter_type
7412 || ((TYPE_PRECISION (shorter_type)
7413 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7414 && (TYPE_UNSIGNED (shorter_type)
7415 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7416 || (TREE_CODE (arg1_unw) == INTEGER_CST
7417 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7418 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7419 && int_fits_type_p (arg1_unw, shorter_type))))
7420 return fold_build2_loc (loc, code, type, arg0_unw,
7421 fold_convert_loc (loc, shorter_type, arg1_unw));
7423 if (TREE_CODE (arg1_unw) != INTEGER_CST
7424 || TREE_CODE (shorter_type) != INTEGER_TYPE
7425 || !int_fits_type_p (arg1_unw, shorter_type))
7426 return NULL_TREE;
7428 /* If we are comparing with an integer that does not fit into the range
7429 of the shorter type, the result is known. */
7430 outer_type = TREE_TYPE (arg1_unw);
7431 min = lower_bound_in_type (outer_type, shorter_type);
7432 max = upper_bound_in_type (outer_type, shorter_type);
7434 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7435 max, arg1_unw));
7436 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7437 arg1_unw, min));
7439 switch (code)
7441 case EQ_EXPR:
7442 if (above || below)
7443 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7444 break;
7446 case NE_EXPR:
7447 if (above || below)
7448 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7449 break;
7451 case LT_EXPR:
7452 case LE_EXPR:
7453 if (above)
7454 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7455 else if (below)
7456 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7458 case GT_EXPR:
7459 case GE_EXPR:
7460 if (above)
7461 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7462 else if (below)
7463 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7465 default:
7466 break;
7469 return NULL_TREE;
7472 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7473 ARG0 just the signedness is changed. */
7475 static tree
7476 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7477 tree arg0, tree arg1)
7479 tree arg0_inner;
7480 tree inner_type, outer_type;
7482 if (!CONVERT_EXPR_P (arg0))
7483 return NULL_TREE;
7485 outer_type = TREE_TYPE (arg0);
7486 arg0_inner = TREE_OPERAND (arg0, 0);
7487 inner_type = TREE_TYPE (arg0_inner);
7489 #ifdef HAVE_canonicalize_funcptr_for_compare
7490 /* Disable this optimization if we're casting a function pointer
7491 type on targets that require function pointer canonicalization. */
7492 if (HAVE_canonicalize_funcptr_for_compare
7493 && TREE_CODE (inner_type) == POINTER_TYPE
7494 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7495 return NULL_TREE;
7496 #endif
7498 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7499 return NULL_TREE;
7501 if (TREE_CODE (arg1) != INTEGER_CST
7502 && !(CONVERT_EXPR_P (arg1)
7503 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7504 return NULL_TREE;
7506 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7507 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7508 && code != NE_EXPR
7509 && code != EQ_EXPR)
7510 return NULL_TREE;
7512 if (TREE_CODE (arg1) == INTEGER_CST)
7513 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7514 TREE_INT_CST_HIGH (arg1), 0,
7515 TREE_OVERFLOW (arg1));
7516 else
7517 arg1 = fold_convert_loc (loc, inner_type, arg1);
7519 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7522 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7523 the step of the array. Reconstructs s and delta in the case of s *
7524 delta being an integer constant (and thus already folded). ADDR is
7525 the address. OP1 is the multiplicative expression. If the
7526 function succeeds, the new address expression is returned.
7527 Otherwise NULL_TREE is returned. LOC is the location of the
7528 resulting expression. */
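/* E.g. for int a[10], &a[1] p+ i * 4 (with a 4-byte element step,
   an assumption for illustration) can be rewritten as &a[1 + i]. */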
7530 static tree
7531 try_move_mult_to_index (location_t loc, tree addr, tree op1)
7533 tree s, delta, step;
7534 tree ref = TREE_OPERAND (addr, 0), pref;
7535 tree ret, pos;
7536 tree itype;
7537 bool mdim = false;
7539 /* Strip the nops that might be added when converting op1 to sizetype. */
7540 STRIP_NOPS (op1);
7542 /* Canonicalize op1 into a possibly non-constant delta
7543 and an INTEGER_CST s. */
7544 if (TREE_CODE (op1) == MULT_EXPR)
7546 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7548 STRIP_NOPS (arg0);
7549 STRIP_NOPS (arg1);
7551 if (TREE_CODE (arg0) == INTEGER_CST)
7553 s = arg0;
7554 delta = arg1;
7556 else if (TREE_CODE (arg1) == INTEGER_CST)
7558 s = arg1;
7559 delta = arg0;
7561 else
7562 return NULL_TREE;
7564 else if (TREE_CODE (op1) == INTEGER_CST)
7566 delta = op1;
7567 s = NULL_TREE;
7569 else
7571 /* Treat the expression as delta * 1. */
7572 delta = op1;
7573 s = integer_one_node;
7576 for (;; ref = TREE_OPERAND (ref, 0))
7578 if (TREE_CODE (ref) == ARRAY_REF)
7580 tree domain;
7582 /* Remember if this was a multi-dimensional array. */
7583 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7584 mdim = true;
7586 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7587 if (! domain)
7588 continue;
7589 itype = TREE_TYPE (domain);
7591 step = array_ref_element_size (ref);
7592 if (TREE_CODE (step) != INTEGER_CST)
7593 continue;
7595 if (s)
7597 if (! tree_int_cst_equal (step, s))
7598 continue;
7600 else
7602 /* Check whether delta is a multiple of step. */
7603 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7604 if (! tmp)
7605 continue;
7606 delta = tmp;
7609 /* Only fold here if we can verify we do not overflow one
7610 dimension of a multi-dimensional array. */
7611 if (mdim)
7613 tree tmp;
7615 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7616 || !TYPE_MAX_VALUE (domain)
7617 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7618 continue;
7620 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7621 fold_convert_loc (loc, itype,
7622 TREE_OPERAND (ref, 1)),
7623 fold_convert_loc (loc, itype, delta));
7624 if (!tmp
7625 || TREE_CODE (tmp) != INTEGER_CST
7626 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7627 continue;
7630 break;
7632 else
7633 mdim = false;
7635 if (!handled_component_p (ref))
7636 return NULL_TREE;
7639 /* We found the suitable array reference. So copy everything up to it,
7640 and replace the index. */
7642 pref = TREE_OPERAND (addr, 0);
7643 ret = copy_node (pref);
7644 SET_EXPR_LOCATION (ret, loc);
7645 pos = ret;
7647 while (pref != ref)
7649 pref = TREE_OPERAND (pref, 0);
7650 TREE_OPERAND (pos, 0) = copy_node (pref);
7651 pos = TREE_OPERAND (pos, 0);
7654 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7655 fold_convert_loc (loc, itype,
7656 TREE_OPERAND (pos, 1)),
7657 fold_convert_loc (loc, itype, delta));
7659 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7663 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7664 means A >= Y && A != MAX, but in this case we know that
7665 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
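/* This is safe because BOUND establishes A < X <= MAX, so A + 1
   cannot wrap; under that assumption A + 1 > Y is equivalent to
   A >= Y. */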
7667 static tree
7668 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7670 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7672 if (TREE_CODE (bound) == LT_EXPR)
7673 a = TREE_OPERAND (bound, 0);
7674 else if (TREE_CODE (bound) == GT_EXPR)
7675 a = TREE_OPERAND (bound, 1);
7676 else
7677 return NULL_TREE;
7679 typea = TREE_TYPE (a);
7680 if (!INTEGRAL_TYPE_P (typea)
7681 && !POINTER_TYPE_P (typea))
7682 return NULL_TREE;
7684 if (TREE_CODE (ineq) == LT_EXPR)
7686 a1 = TREE_OPERAND (ineq, 1);
7687 y = TREE_OPERAND (ineq, 0);
7689 else if (TREE_CODE (ineq) == GT_EXPR)
7691 a1 = TREE_OPERAND (ineq, 0);
7692 y = TREE_OPERAND (ineq, 1);
7694 else
7695 return NULL_TREE;
7697 if (TREE_TYPE (a1) != typea)
7698 return NULL_TREE;
7700 if (POINTER_TYPE_P (typea))
7702 /* Convert the pointers to integers before taking the difference. */
7703 tree ta = fold_convert_loc (loc, ssizetype, a);
7704 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7705 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7707 else
7708 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7710 if (!diff || !integer_onep (diff))
7711 return NULL_TREE;
7713 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7716 /* Fold a sum or difference of at least one multiplication.
7717 Returns the folded tree or NULL if no simplification could be made. */
7719 static tree
7720 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7721 tree arg0, tree arg1)
7723 tree arg00, arg01, arg10, arg11;
7724 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7726 /* (A * C) +- (B * C) -> (A+-B) * C.
7727 (A * C) +- A -> A * (C+-1).
7728 We are most concerned about the case where C is a constant,
7729 but other combinations show up during loop reduction. Since
7730 it is not difficult, try all four possibilities. */
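/* E.g. A*4 + B*4 -> (A+B)*4 and A*4 + A -> A*5. */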
7732 if (TREE_CODE (arg0) == MULT_EXPR)
7734 arg00 = TREE_OPERAND (arg0, 0);
7735 arg01 = TREE_OPERAND (arg0, 1);
7737 else if (TREE_CODE (arg0) == INTEGER_CST)
7739 arg00 = build_one_cst (type);
7740 arg01 = arg0;
7742 else
7744 /* We cannot generate constant 1 for fract. */
7745 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7746 return NULL_TREE;
7747 arg00 = arg0;
7748 arg01 = build_one_cst (type);
7750 if (TREE_CODE (arg1) == MULT_EXPR)
7752 arg10 = TREE_OPERAND (arg1, 0);
7753 arg11 = TREE_OPERAND (arg1, 1);
7755 else if (TREE_CODE (arg1) == INTEGER_CST)
7757 arg10 = build_one_cst (type);
7758 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7759 the purpose of this canonicalization. */
7760 if (TREE_INT_CST_HIGH (arg1) == -1
7761 && negate_expr_p (arg1)
7762 && code == PLUS_EXPR)
7764 arg11 = negate_expr (arg1);
7765 code = MINUS_EXPR;
7767 else
7768 arg11 = arg1;
7770 else
7772 /* We cannot generate constant 1 for fract. */
7773 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7774 return NULL_TREE;
7775 arg10 = arg1;
7776 arg11 = build_one_cst (type);
7778 same = NULL_TREE;
7780 if (operand_equal_p (arg01, arg11, 0))
7781 same = arg01, alt0 = arg00, alt1 = arg10;
7782 else if (operand_equal_p (arg00, arg10, 0))
7783 same = arg00, alt0 = arg01, alt1 = arg11;
7784 else if (operand_equal_p (arg00, arg11, 0))
7785 same = arg00, alt0 = arg01, alt1 = arg10;
7786 else if (operand_equal_p (arg01, arg10, 0))
7787 same = arg01, alt0 = arg00, alt1 = arg11;
7789 /* No identical multiplicands; see if we can find a common
7790 power-of-two factor in non-power-of-two multiplies. This
7791 can help in multi-dimensional array access. */
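/* E.g. I*12 + J*4 factors out 4 to give (I*3 + J)*4, the shape of a
   row-major two-dimensional array access. */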
7792 else if (host_integerp (arg01, 0)
7793 && host_integerp (arg11, 0))
7795 HOST_WIDE_INT int01, int11, tmp;
7796 bool swap = false;
7797 tree maybe_same;
7798 int01 = TREE_INT_CST_LOW (arg01);
7799 int11 = TREE_INT_CST_LOW (arg11);
7801 /* Move min of absolute values to int11. */
7802 if ((int01 >= 0 ? int01 : -int01)
7803 < (int11 >= 0 ? int11 : -int11))
7805 tmp = int01, int01 = int11, int11 = tmp;
7806 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7807 maybe_same = arg01;
7808 swap = true;
7810 else
7811 maybe_same = arg11;
7813 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7814 /* The remainder should not be a constant, otherwise we
7815 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7816 increased the number of multiplications necessary. */
7817 && TREE_CODE (arg10) != INTEGER_CST)
7819 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7820 build_int_cst (TREE_TYPE (arg00),
7821 int01 / int11));
7822 alt1 = arg10;
7823 same = maybe_same;
7824 if (swap)
7825 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7829 if (same)
7830 return fold_build2_loc (loc, MULT_EXPR, type,
7831 fold_build2_loc (loc, code, type,
7832 fold_convert_loc (loc, type, alt0),
7833 fold_convert_loc (loc, type, alt1)),
7834 fold_convert_loc (loc, type, same));
7836 return NULL_TREE;
7839 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7840 specified by EXPR into the buffer PTR of length LEN bytes.
7841 Return the number of bytes placed in the buffer, or zero
7842 upon failure. */
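/* As an illustration, assuming a 32-bit little-endian target with
   8-bit units, the INTEGER_CST 0x01020304 is encoded as the byte
   sequence 04 03 02 01. */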
7844 static int
7845 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7847 tree type = TREE_TYPE (expr);
7848 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7849 int byte, offset, word, words;
7850 unsigned char value;
7852 if (total_bytes > len)
7853 return 0;
7854 words = total_bytes / UNITS_PER_WORD;
7856 for (byte = 0; byte < total_bytes; byte++)
7858 int bitpos = byte * BITS_PER_UNIT;
7859 if (bitpos < HOST_BITS_PER_WIDE_INT)
7860 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7861 else
7862 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7863 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7865 if (total_bytes > UNITS_PER_WORD)
7867 word = byte / UNITS_PER_WORD;
7868 if (WORDS_BIG_ENDIAN)
7869 word = (words - 1) - word;
7870 offset = word * UNITS_PER_WORD;
7871 if (BYTES_BIG_ENDIAN)
7872 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7873 else
7874 offset += byte % UNITS_PER_WORD;
7876 else
7877 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7878 ptr[offset] = value;
7880 return total_bytes;
7884 /* Subroutine of native_encode_expr. Encode the REAL_CST
7885 specified by EXPR into the buffer PTR of length LEN bytes.
7886 Return the number of bytes placed in the buffer, or zero
7887 upon failure. */
7889 static int
7890 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7892 tree type = TREE_TYPE (expr);
7893 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7894 int byte, offset, word, words, bitpos;
7895 unsigned char value;
7897 /* There are always 32 bits in each long, no matter the size of
7898 the host's long. We handle floating point representations with
7899 up to 192 bits (six 32-bit chunks). */
7900 long tmp[6];
7902 if (total_bytes > len)
7903 return 0;
7904 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7906 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7908 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7909 bitpos += BITS_PER_UNIT)
7911 byte = (bitpos / BITS_PER_UNIT) & 3;
7912 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7914 if (UNITS_PER_WORD < 4)
7916 word = byte / UNITS_PER_WORD;
7917 if (WORDS_BIG_ENDIAN)
7918 word = (words - 1) - word;
7919 offset = word * UNITS_PER_WORD;
7920 if (BYTES_BIG_ENDIAN)
7921 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7922 else
7923 offset += byte % UNITS_PER_WORD;
7925 else
7926 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7927 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7929 return total_bytes;
7932 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7933 specified by EXPR into the buffer PTR of length LEN bytes.
7934 Return the number of bytes placed in the buffer, or zero
7935 upon failure. */
7937 static int
7938 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7940 int rsize, isize;
7941 tree part;
7943 part = TREE_REALPART (expr);
7944 rsize = native_encode_expr (part, ptr, len);
7945 if (rsize == 0)
7946 return 0;
7947 part = TREE_IMAGPART (expr);
7948 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7949 if (isize != rsize)
7950 return 0;
7951 return rsize + isize;
7955 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7956 specified by EXPR into the buffer PTR of length LEN bytes.
7957 Return the number of bytes placed in the buffer, or zero
7958 upon failure. */
7960 static int
7961 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7963 int i, size, offset, count;
7964 tree itype, elem, elements;
7966 offset = 0;
7967 elements = TREE_VECTOR_CST_ELTS (expr);
7968 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7969 itype = TREE_TYPE (TREE_TYPE (expr));
7970 size = GET_MODE_SIZE (TYPE_MODE (itype));
7971 for (i = 0; i < count; i++)
7973 if (elements)
7975 elem = TREE_VALUE (elements);
7976 elements = TREE_CHAIN (elements);
7978 else
7979 elem = NULL_TREE;
7981 if (elem)
7983 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7984 return 0;
7986 else
7988 if (offset + size > len)
7989 return 0;
7990 memset (ptr+offset, 0, size);
7992 offset += size;
7994 return offset;
7998 /* Subroutine of native_encode_expr. Encode the STRING_CST
7999 specified by EXPR into the buffer PTR of length LEN bytes.
8000 Return the number of bytes placed in the buffer, or zero
8001 upon failure. */
8003 static int
8004 native_encode_string (const_tree expr, unsigned char *ptr, int len)
8006 tree type = TREE_TYPE (expr);
8007 HOST_WIDE_INT total_bytes;
8009 if (TREE_CODE (type) != ARRAY_TYPE
8010 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8011 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
8012 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
8013 return 0;
8014 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
8015 if (total_bytes > len)
8016 return 0;
8017 if (TREE_STRING_LENGTH (expr) < total_bytes)
8019 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
8020 memset (ptr + TREE_STRING_LENGTH (expr), 0,
8021 total_bytes - TREE_STRING_LENGTH (expr));
8023 else
8024 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
8025 return total_bytes;
8029 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
8030 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
8031 buffer PTR of length LEN bytes. Return the number of bytes
8032 placed in the buffer, or zero upon failure. */
8034 int
8035 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
8037 switch (TREE_CODE (expr))
8039 case INTEGER_CST:
8040 return native_encode_int (expr, ptr, len);
8042 case REAL_CST:
8043 return native_encode_real (expr, ptr, len);
8045 case COMPLEX_CST:
8046 return native_encode_complex (expr, ptr, len);
8048 case VECTOR_CST:
8049 return native_encode_vector (expr, ptr, len);
8051 case STRING_CST:
8052 return native_encode_string (expr, ptr, len);
8054 default:
8055 return 0;
8060 /* Subroutine of native_interpret_expr. Interpret the contents of
8061 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8062 If the buffer cannot be interpreted, return NULL_TREE. */
8064 static tree
8065 native_interpret_int (tree type, const unsigned char *ptr, int len)
8067 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8068 int byte, offset, word, words;
8069 unsigned char value;
8070 unsigned HOST_WIDE_INT lo = 0;
8071 HOST_WIDE_INT hi = 0;
8073 if (total_bytes > len)
8074 return NULL_TREE;
8075 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
8076 return NULL_TREE;
8077 words = total_bytes / UNITS_PER_WORD;
8079 for (byte = 0; byte < total_bytes; byte++)
8081 int bitpos = byte * BITS_PER_UNIT;
8082 if (total_bytes > UNITS_PER_WORD)
8084 word = byte / UNITS_PER_WORD;
8085 if (WORDS_BIG_ENDIAN)
8086 word = (words - 1) - word;
8087 offset = word * UNITS_PER_WORD;
8088 if (BYTES_BIG_ENDIAN)
8089 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8090 else
8091 offset += byte % UNITS_PER_WORD;
8093 else
8094 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8095 value = ptr[offset];
8097 if (bitpos < HOST_BITS_PER_WIDE_INT)
8098 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
8099 else
8100 hi |= (unsigned HOST_WIDE_INT) value
8101 << (bitpos - HOST_BITS_PER_WIDE_INT);
8104 return build_int_cst_wide_type (type, lo, hi);
8108 /* Subroutine of native_interpret_expr. Interpret the contents of
8109 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8110 If the buffer cannot be interpreted, return NULL_TREE. */
8112 static tree
8113 native_interpret_real (tree type, const unsigned char *ptr, int len)
8115 enum machine_mode mode = TYPE_MODE (type);
8116 int total_bytes = GET_MODE_SIZE (mode);
8117 int byte, offset, word, words, bitpos;
8118 unsigned char value;
8119 /* There are always 32 bits in each long, no matter the size of
8120 the host's long. We handle floating point representations with
8121 up to 192 bits (six 32-bit chunks). */
8122 REAL_VALUE_TYPE r;
8123 long tmp[6];
8125 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8126 if (total_bytes > len || total_bytes > 24)
8127 return NULL_TREE;
8128 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8130 memset (tmp, 0, sizeof (tmp));
8131 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8132 bitpos += BITS_PER_UNIT)
8134 byte = (bitpos / BITS_PER_UNIT) & 3;
8135 if (UNITS_PER_WORD < 4)
8137 word = byte / UNITS_PER_WORD;
8138 if (WORDS_BIG_ENDIAN)
8139 word = (words - 1) - word;
8140 offset = word * UNITS_PER_WORD;
8141 if (BYTES_BIG_ENDIAN)
8142 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8143 else
8144 offset += byte % UNITS_PER_WORD;
8146 else
8147 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
8148 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8150 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8153 real_from_target (&r, tmp, mode);
8154 return build_real (type, r);
8158 /* Subroutine of native_interpret_expr. Interpret the contents of
8159 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8160 If the buffer cannot be interpreted, return NULL_TREE. */
8162 static tree
8163 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8165 tree etype, rpart, ipart;
8166 int size;
8168 etype = TREE_TYPE (type);
8169 size = GET_MODE_SIZE (TYPE_MODE (etype));
8170 if (size * 2 > len)
8171 return NULL_TREE;
8172 rpart = native_interpret_expr (etype, ptr, size);
8173 if (!rpart)
8174 return NULL_TREE;
8175 ipart = native_interpret_expr (etype, ptr+size, size);
8176 if (!ipart)
8177 return NULL_TREE;
8178 return build_complex (type, rpart, ipart);
8182 /* Subroutine of native_interpret_expr. Interpret the contents of
8183 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8184 If the buffer cannot be interpreted, return NULL_TREE. */
8186 static tree
8187 native_interpret_vector (tree type, const unsigned char *ptr, int len)
8189 tree etype, elem, elements;
8190 int i, size, count;
8192 etype = TREE_TYPE (type);
8193 size = GET_MODE_SIZE (TYPE_MODE (etype));
8194 count = TYPE_VECTOR_SUBPARTS (type);
8195 if (size * count > len)
8196 return NULL_TREE;
8198 elements = NULL_TREE;
8199 for (i = count - 1; i >= 0; i--)
8201 elem = native_interpret_expr (etype, ptr+(i*size), size);
8202 if (!elem)
8203 return NULL_TREE;
8204 elements = tree_cons (NULL_TREE, elem, elements);
8206 return build_vector (type, elements);
8210 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8211 the buffer PTR of length LEN as a constant of type TYPE. For
8212 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8213 we return a REAL_CST, etc. If the buffer cannot be interpreted,
8214 return NULL_TREE. */
8216 tree
8217 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8219 switch (TREE_CODE (type))
8221 case INTEGER_TYPE:
8222 case ENUMERAL_TYPE:
8223 case BOOLEAN_TYPE:
8224 return native_interpret_int (type, ptr, len);
8226 case REAL_TYPE:
8227 return native_interpret_real (type, ptr, len);
8229 case COMPLEX_TYPE:
8230 return native_interpret_complex (type, ptr, len);
8232 case VECTOR_TYPE:
8233 return native_interpret_vector (type, ptr, len);
8235 default:
8236 return NULL_TREE;
8241 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8242 TYPE at compile-time. If we're unable to perform the conversion
8243 return NULL_TREE. */
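/* E.g. on an IEEE target where float and int share a 32-bit mode,
   VIEW_CONVERT_EXPR<int>(1.0f) folds to the constant 0x3f800000. */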
8245 static tree
8246 fold_view_convert_expr (tree type, tree expr)
8248 /* We support up to 512-bit values (for V8DFmode). */
8249 unsigned char buffer[64];
8250 int len;
8252 /* Check that the host and target are sane. */
8253 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8254 return NULL_TREE;
8256 len = native_encode_expr (expr, buffer, sizeof (buffer));
8257 if (len == 0)
8258 return NULL_TREE;
8260 return native_interpret_expr (type, buffer, len);
8263 /* Build an expression for the address of T. Folds away INDIRECT_REF
8264 to avoid confusing the gimplify process. */
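/* E.g. &*p simplifies to p (with a conversion to PTRTYPE if needed),
   so the gimplifier never sees the INDIRECT_REF. */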
8266 tree
8267 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8269 /* The size of the object is not relevant when talking about its address. */
8270 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8271 t = TREE_OPERAND (t, 0);
8273 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
8274 if (TREE_CODE (t) == INDIRECT_REF
8275 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
8277 t = TREE_OPERAND (t, 0);
8279 if (TREE_TYPE (t) != ptrtype)
8281 t = build1 (NOP_EXPR, ptrtype, t);
8282 SET_EXPR_LOCATION (t, loc);
8285 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8287 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8289 if (TREE_TYPE (t) != ptrtype)
8290 t = fold_convert_loc (loc, ptrtype, t);
8292 else
8294 t = build1 (ADDR_EXPR, ptrtype, t);
8295 SET_EXPR_LOCATION (t, loc);
8298 return t;
8301 /* Build an expression for the address of T. */
8303 tree
8304 build_fold_addr_expr_loc (location_t loc, tree t)
8306 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8308 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8311 /* Fold a unary expression of code CODE and type TYPE with operand
8312 OP0. Return the folded expression if folding is successful.
8313 Otherwise, return NULL_TREE. */
8315 tree
8316 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8318 tree tem;
8319 tree arg0;
8320 enum tree_code_class kind = TREE_CODE_CLASS (code);
8322 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8323 && TREE_CODE_LENGTH (code) == 1);
8325 arg0 = op0;
8326 if (arg0)
8328 if (CONVERT_EXPR_CODE_P (code)
8329 || code == FLOAT_EXPR || code == ABS_EXPR)
8331 /* Don't use STRIP_NOPS, because signedness of argument type
8332 matters. */
8333 STRIP_SIGN_NOPS (arg0);
8335 else
8337 /* Strip any conversions that don't change the mode. This
8338 is safe for every expression, except for a comparison
8339 expression because its signedness is derived from its
8340 operands.
8342 Note that this is done as an internal manipulation within
8343 the constant folder, in order to find the simplest
8344 representation of the arguments so that their form can be
8345 studied. In any cases, the appropriate type conversions
8346 should be put back in the tree that will get out of the
8347 constant folder. */
8348 STRIP_NOPS (arg0);
8352 if (TREE_CODE_CLASS (code) == tcc_unary)
8354 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8355 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8356 fold_build1_loc (loc, code, type,
8357 fold_convert_loc (loc, TREE_TYPE (op0),
8358 TREE_OPERAND (arg0, 1))));
8359 else if (TREE_CODE (arg0) == COND_EXPR)
8361 tree arg01 = TREE_OPERAND (arg0, 1);
8362 tree arg02 = TREE_OPERAND (arg0, 2);
8363 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8364 arg01 = fold_build1_loc (loc, code, type,
8365 fold_convert_loc (loc,
8366 TREE_TYPE (op0), arg01));
8367 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8368 arg02 = fold_build1_loc (loc, code, type,
8369 fold_convert_loc (loc,
8370 TREE_TYPE (op0), arg02));
8371 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8372 arg01, arg02);
8374 /* If this was a conversion, and all we did was to move it inside
8375 the COND_EXPR, bring it back out. But leave it if it is a
8376 conversion from integer to integer and the result precision is
8377 no wider than a word, since such a conversion is cheap and may
8378 be optimized away by combine, while it couldn't be if it were
8379 outside the COND_EXPR. Then return so we don't get into an
8380 infinite recursion loop taking the conversion out and then
8381 back in. */
8383 if ((CONVERT_EXPR_CODE_P (code)
8384 || code == NON_LVALUE_EXPR)
8385 && TREE_CODE (tem) == COND_EXPR
8386 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8387 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8388 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8389 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8390 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8391 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8392 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8393 && (INTEGRAL_TYPE_P
8394 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8395 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8396 || flag_syntax_only))
8398 tem = build1 (code, type,
8399 build3 (COND_EXPR,
8400 TREE_TYPE (TREE_OPERAND
8401 (TREE_OPERAND (tem, 1), 0)),
8402 TREE_OPERAND (tem, 0),
8403 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8404 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8405 SET_EXPR_LOCATION (tem, loc);
8407 return tem;
8409 else if (COMPARISON_CLASS_P (arg0))
8411 if (TREE_CODE (type) == BOOLEAN_TYPE)
8413 arg0 = copy_node (arg0);
8414 TREE_TYPE (arg0) = type;
8415 return arg0;
8417 else if (TREE_CODE (type) != INTEGER_TYPE)
8418 return fold_build3_loc (loc, COND_EXPR, type, arg0,
8419 fold_build1_loc (loc, code, type,
8420 integer_one_node),
8421 fold_build1_loc (loc, code, type,
8422 integer_zero_node));
8426 switch (code)
8428 case PAREN_EXPR:
8429 /* Re-association barriers around constants and other re-association
8430 barriers can be removed. */
8431 if (CONSTANT_CLASS_P (op0)
8432 || TREE_CODE (op0) == PAREN_EXPR)
8433 return fold_convert_loc (loc, type, op0);
8434 return NULL_TREE;
8436 CASE_CONVERT:
8437 case FLOAT_EXPR:
8438 case FIX_TRUNC_EXPR:
8439 if (TREE_TYPE (op0) == type)
8440 return op0;
8442 /* If we have (type) (a CMP b) and type is an integral type, return
8443 new expression involving the new type. */
8444 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8445 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8446 TREE_OPERAND (op0, 1));
8448 /* Handle cases of two conversions in a row. */
8449 if (CONVERT_EXPR_P (op0))
8451 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8452 tree inter_type = TREE_TYPE (op0);
8453 int inside_int = INTEGRAL_TYPE_P (inside_type);
8454 int inside_ptr = POINTER_TYPE_P (inside_type);
8455 int inside_float = FLOAT_TYPE_P (inside_type);
8456 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8457 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8458 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8459 int inter_int = INTEGRAL_TYPE_P (inter_type);
8460 int inter_ptr = POINTER_TYPE_P (inter_type);
8461 int inter_float = FLOAT_TYPE_P (inter_type);
8462 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8463 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8464 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8465 int final_int = INTEGRAL_TYPE_P (type);
8466 int final_ptr = POINTER_TYPE_P (type);
8467 int final_float = FLOAT_TYPE_P (type);
8468 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8469 unsigned int final_prec = TYPE_PRECISION (type);
8470 int final_unsignedp = TYPE_UNSIGNED (type);
8472 /* In addition to the cases of two conversions in a row
8473 handled below, if we are converting something to its own
8474 type via an object of identical or wider precision, neither
8475 conversion is needed. */
8476 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8477 && (((inter_int || inter_ptr) && final_int)
8478 || (inter_float && final_float))
8479 && inter_prec >= final_prec)
8480 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8482 /* Likewise, if the intermediate and initial types are either both
8483 float or both integer, we don't need the middle conversion if the
8484 former is wider than the latter and doesn't change the signedness
8485 (for integers). Avoid this if the final type is a pointer since
8486 then we sometimes need the middle conversion. Likewise if the
8487 final type has a precision not equal to the size of its mode. */
8488 if (((inter_int && inside_int)
8489 || (inter_float && inside_float)
8490 || (inter_vec && inside_vec))
8491 && inter_prec >= inside_prec
8492 && (inter_float || inter_vec
8493 || inter_unsignedp == inside_unsignedp)
8494 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8495 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8496 && ! final_ptr
8497 && (! final_vec || inter_prec == inside_prec))
8498 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8500 /* If we have a sign-extension of a zero-extended value, we can
8501 replace that by a single zero-extension. */
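/* E.g. (long) (int) (unsigned short) X sign-extends a value that was
   already zero-extended, so it folds to the single zero-extension
   (long) (unsigned short) X (assuming the usual 16/32/64-bit
   precisions). */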
8502 if (inside_int && inter_int && final_int
8503 && inside_prec < inter_prec && inter_prec < final_prec
8504 && inside_unsignedp && !inter_unsignedp)
8505 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8507 /* Two conversions in a row are not needed unless:
8508 - some conversion is floating-point (overstrict for now), or
8509 - some conversion is a vector (overstrict for now), or
8510 - the intermediate type is narrower than both initial and
8511 final, or
8512 - the intermediate type and innermost type differ in signedness,
8513 and the outermost type is wider than the intermediate, or
8514 - the initial type is a pointer type and the precisions of the
8515 intermediate and final types differ, or
8516 - the final type is a pointer type and the precisions of the
8517 initial and intermediate types differ. */
8518 if (! inside_float && ! inter_float && ! final_float
8519 && ! inside_vec && ! inter_vec && ! final_vec
8520 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8521 && ! (inside_int && inter_int
8522 && inter_unsignedp != inside_unsignedp
8523 && inter_prec < final_prec)
8524 && ((inter_unsignedp && inter_prec > inside_prec)
8525 == (final_unsignedp && final_prec > inter_prec))
8526 && ! (inside_ptr && inter_prec != final_prec)
8527 && ! (final_ptr && inside_prec != inter_prec)
8528 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8529 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8530 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8533 /* Handle (T *)&A.B.C for A being of type T and B and C
8534 living at offset zero. This occurs frequently in
8535 C++ upcasting and then accessing the base. */
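/* E.g. (struct D *) &d.b.a, where d has type struct D and members b
   and a both live at offset zero, folds to &d. */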
8536 if (TREE_CODE (op0) == ADDR_EXPR
8537 && POINTER_TYPE_P (type)
8538 && handled_component_p (TREE_OPERAND (op0, 0)))
8540 HOST_WIDE_INT bitsize, bitpos;
8541 tree offset;
8542 enum machine_mode mode;
8543 int unsignedp, volatilep;
8544 tree base = TREE_OPERAND (op0, 0);
8545 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8546 &mode, &unsignedp, &volatilep, false);
8547 /* If the reference was to a (constant) zero offset, we can use
8548 the address of the base if it has the same base type
8549 as the result type. */
8550 if (! offset && bitpos == 0
8551 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8552 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8553 return fold_convert_loc (loc, type,
8554 build_fold_addr_expr_loc (loc, base));
8557 if (TREE_CODE (op0) == MODIFY_EXPR
8558 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8559 /* Detect assigning a bitfield. */
8560 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8561 && DECL_BIT_FIELD
8562 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8564 /* Don't leave an assignment inside a conversion
8565 unless assigning a bitfield. */
8566 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8567 /* First do the assignment, then return converted constant. */
8568 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8569 TREE_NO_WARNING (tem) = 1;
8570 TREE_USED (tem) = 1;
8571 SET_EXPR_LOCATION (tem, loc);
8572 return tem;
8575 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8576 constants (if x has signed type, the sign bit cannot be set
8577 in c). This folds the extension into the BIT_AND_EXPR.
8578 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8579 very likely don't have maximal range for their precision and this
8580 transformation effectively doesn't preserve non-maximal ranges. */
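/* E.g. (long) (X & 0xff) with a signed 32-bit X becomes
   (long) X & 0xff: the mask guarantees the sign bit is clear, so
   extending before masking is safe (64-bit long assumed for the
   example). */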
8581 if (TREE_CODE (type) == INTEGER_TYPE
8582 && TREE_CODE (op0) == BIT_AND_EXPR
8583 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8585 tree and_expr = op0;
8586 tree and0 = TREE_OPERAND (and_expr, 0);
8587 tree and1 = TREE_OPERAND (and_expr, 1);
8588 int change = 0;
8590 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8591 || (TYPE_PRECISION (type)
8592 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8593 change = 1;
8594 else if (TYPE_PRECISION (TREE_TYPE (and1))
8595 <= HOST_BITS_PER_WIDE_INT
8596 && host_integerp (and1, 1))
8598 unsigned HOST_WIDE_INT cst;
8600 cst = tree_low_cst (and1, 1);
8601 cst &= (HOST_WIDE_INT) -1
8602 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8603 change = (cst == 0);
8604 #ifdef LOAD_EXTEND_OP
8605 if (change
8606 && !flag_syntax_only
8607 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8608 == ZERO_EXTEND))
8610 tree uns = unsigned_type_for (TREE_TYPE (and0));
8611 and0 = fold_convert_loc (loc, uns, and0);
8612 and1 = fold_convert_loc (loc, uns, and1);
8614 #endif
8616 if (change)
8618 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8619 TREE_INT_CST_HIGH (and1), 0,
8620 TREE_OVERFLOW (and1));
8621 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8622 fold_convert_loc (loc, type, and0), tem);
8626 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8627 when one of the new casts will fold away. Conservatively we assume
8628 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8629 if (POINTER_TYPE_P (type)
8630 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8631 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8632 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8633 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8635 tree arg00 = TREE_OPERAND (arg0, 0);
8636 tree arg01 = TREE_OPERAND (arg0, 1);
8638 return fold_build2_loc (loc,
8639 TREE_CODE (arg0), type,
8640 fold_convert_loc (loc, type, arg00),
8641 fold_convert_loc (loc, sizetype, arg01));
8644 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8645 of the same precision, and X is an integer type not narrower than
8646 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8647 if (INTEGRAL_TYPE_P (type)
8648 && TREE_CODE (op0) == BIT_NOT_EXPR
8649 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8650 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8651 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8653 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8654 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8655 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8656 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8657 fold_convert_loc (loc, type, tem));
8660 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8661 type of X and Y (integer types only). */
8662 if (INTEGRAL_TYPE_P (type)
8663 && TREE_CODE (op0) == MULT_EXPR
8664 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8665 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8667 /* Be careful not to introduce new overflows. */
8668 tree mult_type;
8669 if (TYPE_OVERFLOW_WRAPS (type))
8670 mult_type = type;
8671 else
8672 mult_type = unsigned_type_for (type);
8674 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8676 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8677 fold_convert_loc (loc, mult_type,
8678 TREE_OPERAND (op0, 0)),
8679 fold_convert_loc (loc, mult_type,
8680 TREE_OPERAND (op0, 1)));
8681 return fold_convert_loc (loc, type, tem);
8685 tem = fold_convert_const (code, type, op0);
8686 return tem ? tem : NULL_TREE;
8688 case ADDR_SPACE_CONVERT_EXPR:
8689 if (integer_zerop (arg0))
8690 return fold_convert_const (code, type, arg0);
8691 return NULL_TREE;
8693 case FIXED_CONVERT_EXPR:
8694 tem = fold_convert_const (code, type, arg0);
8695 return tem ? tem : NULL_TREE;
8697 case VIEW_CONVERT_EXPR:
8698 if (TREE_TYPE (op0) == type)
8699 return op0;
8700 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8701 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8702 type, TREE_OPERAND (op0, 0));
8704 /* For integral conversions with the same precision or pointer
8705 conversions use a NOP_EXPR instead. */
8706 if ((INTEGRAL_TYPE_P (type)
8707 || POINTER_TYPE_P (type))
8708 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8709 || POINTER_TYPE_P (TREE_TYPE (op0)))
8710 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8711 return fold_convert_loc (loc, type, op0);
8713 /* Strip inner integral conversions that do not change the precision. */
8714 if (CONVERT_EXPR_P (op0)
8715 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8716 || POINTER_TYPE_P (TREE_TYPE (op0)))
8717 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8718 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8719 && (TYPE_PRECISION (TREE_TYPE (op0))
8720 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8721 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8722 type, TREE_OPERAND (op0, 0));
8724 return fold_view_convert_expr (type, op0);
8726 case NEGATE_EXPR:
8727 tem = fold_negate_expr (loc, arg0);
8728 if (tem)
8729 return fold_convert_loc (loc, type, tem);
8730 return NULL_TREE;
8732 case ABS_EXPR:
8733 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8734 return fold_abs_const (arg0, type);
8735 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8736 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8737 /* Convert fabs((double)float) into (double)fabsf(float). */
8738 else if (TREE_CODE (arg0) == NOP_EXPR
8739 && TREE_CODE (type) == REAL_TYPE)
8741 tree targ0 = strip_float_extensions (arg0);
8742 if (targ0 != arg0)
8743 return fold_convert_loc (loc, type,
8744 fold_build1_loc (loc, ABS_EXPR,
8745 TREE_TYPE (targ0),
8746 targ0));
8748 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8749 else if (TREE_CODE (arg0) == ABS_EXPR)
8750 return arg0;
8751 else if (tree_expr_nonnegative_p (arg0))
8752 return arg0;
8754 /* Strip sign ops from argument. */
8755 if (TREE_CODE (type) == REAL_TYPE)
8757 tem = fold_strip_sign_ops (arg0);
8758 if (tem)
8759 return fold_build1_loc (loc, ABS_EXPR, type,
8760 fold_convert_loc (loc, type, tem));
8762 return NULL_TREE;
8764 case CONJ_EXPR:
8765 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8766 return fold_convert_loc (loc, type, arg0);
8767 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8769 tree itype = TREE_TYPE (type);
8770 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8771 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8772 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8773 negate_expr (ipart));
8775 if (TREE_CODE (arg0) == COMPLEX_CST)
8777 tree itype = TREE_TYPE (type);
8778 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8779 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8780 return build_complex (type, rpart, negate_expr (ipart));
8782 if (TREE_CODE (arg0) == CONJ_EXPR)
8783 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8784 return NULL_TREE;
8786 case BIT_NOT_EXPR:
8787 if (TREE_CODE (arg0) == INTEGER_CST)
8788 return fold_not_const (arg0, type);
8789 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8790 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8791 /* Convert ~ (-A) to A - 1. */
8792 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8793 return fold_build2_loc (loc, MINUS_EXPR, type,
8794 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8795 build_int_cst (type, 1));
8796 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8797 else if (INTEGRAL_TYPE_P (type)
8798 && ((TREE_CODE (arg0) == MINUS_EXPR
8799 && integer_onep (TREE_OPERAND (arg0, 1)))
8800 || (TREE_CODE (arg0) == PLUS_EXPR
8801 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8802 return fold_build1_loc (loc, NEGATE_EXPR, type,
8803 fold_convert_loc (loc, type,
8804 TREE_OPERAND (arg0, 0)));
8805 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8806 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8807 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8808 fold_convert_loc (loc, type,
8809 TREE_OPERAND (arg0, 0)))))
8810 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8811 fold_convert_loc (loc, type,
8812 TREE_OPERAND (arg0, 1)));
8813 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8814 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8815 fold_convert_loc (loc, type,
8816 TREE_OPERAND (arg0, 1)))))
8817 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8818 fold_convert_loc (loc, type,
8819 TREE_OPERAND (arg0, 0)), tem);
8820 /* Perform BIT_NOT_EXPR on each element individually. */
8821 else if (TREE_CODE (arg0) == VECTOR_CST)
8823 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8824 int count = TYPE_VECTOR_SUBPARTS (type), i;
8826 for (i = 0; i < count; i++)
8828 if (elements)
8830 elem = TREE_VALUE (elements);
8831 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8832 if (elem == NULL_TREE)
8833 break;
8834 elements = TREE_CHAIN (elements);
8836 else
8837 elem = build_int_cst (TREE_TYPE (type), -1);
8838 list = tree_cons (NULL_TREE, elem, list);
8840 if (i == count)
8841 return build_vector (type, nreverse (list));
8844 return NULL_TREE;
8846 case TRUTH_NOT_EXPR:
8847 /* The argument to invert_truthvalue must have Boolean type. */
8848 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8849 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8851 /* Note that the operand of this must be an int
8852 and its values must be 0 or 1.
8853 ("true" is a fixed value perhaps depending on the language,
8854 but we don't handle values other than 1 correctly yet.) */
8855 tem = fold_truth_not_expr (loc, arg0);
8856 if (!tem)
8857 return NULL_TREE;
8858 return fold_convert_loc (loc, type, tem);
8860 case REALPART_EXPR:
8861 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8862 return fold_convert_loc (loc, type, arg0);
8863 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8864 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8865 TREE_OPERAND (arg0, 1));
8866 if (TREE_CODE (arg0) == COMPLEX_CST)
8867 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8868 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8870 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8871 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8872 fold_build1_loc (loc, REALPART_EXPR, itype,
8873 TREE_OPERAND (arg0, 0)),
8874 fold_build1_loc (loc, REALPART_EXPR, itype,
8875 TREE_OPERAND (arg0, 1)));
8876 return fold_convert_loc (loc, type, tem);
8878 if (TREE_CODE (arg0) == CONJ_EXPR)
8880 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8881 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8882 TREE_OPERAND (arg0, 0));
8883 return fold_convert_loc (loc, type, tem);
8885 if (TREE_CODE (arg0) == CALL_EXPR)
8887 tree fn = get_callee_fndecl (arg0);
8888 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8889 switch (DECL_FUNCTION_CODE (fn))
8891 CASE_FLT_FN (BUILT_IN_CEXPI):
8892 fn = mathfn_built_in (type, BUILT_IN_COS);
8893 if (fn)
8894 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8895 break;
8897 default:
8898 break;
8901 return NULL_TREE;
8903 case IMAGPART_EXPR:
8904 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8905 return fold_convert_loc (loc, type, integer_zero_node);
8906 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8907 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8908 TREE_OPERAND (arg0, 0));
8909 if (TREE_CODE (arg0) == COMPLEX_CST)
8910 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8911 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8913 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8914 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8915 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8916 TREE_OPERAND (arg0, 0)),
8917 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8918 TREE_OPERAND (arg0, 1)));
8919 return fold_convert_loc (loc, type, tem);
8921 if (TREE_CODE (arg0) == CONJ_EXPR)
8923 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8924 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8925 return fold_convert_loc (loc, type, negate_expr (tem));
8927 if (TREE_CODE (arg0) == CALL_EXPR)
8929 tree fn = get_callee_fndecl (arg0);
8930 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8931 switch (DECL_FUNCTION_CODE (fn))
8933 CASE_FLT_FN (BUILT_IN_CEXPI):
8934 fn = mathfn_built_in (type, BUILT_IN_SIN);
8935 if (fn)
8936 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8937 break;
8939 default:
8940 break;
8943 return NULL_TREE;
8945 default:
8946 return NULL_TREE;
8947 } /* switch (code) */
8951 /* If the operation was a conversion, do _not_ mark a resulting constant
8952 with TREE_OVERFLOW if the original constant was not. These conversions
8953 have implementation-defined behavior and retaining the TREE_OVERFLOW
8954 flag here would confuse later passes such as VRP. */
8955 tree
8956 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8957 tree type, tree op0)
8959 tree res = fold_unary_loc (loc, code, type, op0);
8960 if (res
8961 && TREE_CODE (res) == INTEGER_CST
8962 && TREE_CODE (op0) == INTEGER_CST
8963 && CONVERT_EXPR_CODE_P (code))
8964 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8966 return res;
8969 /* Fold a binary expression of code CODE and type TYPE with operands
8970 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8971 Return the folded expression if folding is successful. Otherwise,
8972 return NULL_TREE. */
8974 static tree
8975 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8977 enum tree_code compl_code;
8979 if (code == MIN_EXPR)
8980 compl_code = MAX_EXPR;
8981 else if (code == MAX_EXPR)
8982 compl_code = MIN_EXPR;
8983 else
8984 gcc_unreachable ();
8986 /* MIN (MAX (a, b), b) == b. */
8987 if (TREE_CODE (op0) == compl_code
8988 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8989 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8991 /* MIN (MAX (b, a), b) == b. */
8992 if (TREE_CODE (op0) == compl_code
8993 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8994 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8995 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8997 /* MIN (a, MAX (a, b)) == a. */
8998 if (TREE_CODE (op1) == compl_code
8999 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
9000 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
9001 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
9003 /* MIN (a, MAX (b, a)) == a. */
9004 if (TREE_CODE (op1) == compl_code
9005 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
9006 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
9007 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
9009 return NULL_TREE;
9012 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9013 by changing CODE to reduce the magnitude of constants involved in
9014 ARG0 of the comparison.
9015 Returns a canonicalized comparison tree if a simplification was
9016 possible, otherwise returns NULL_TREE.
9017 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9018 valid if signed overflow is undefined. */
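/* E.g. assuming undefined signed overflow, X + 1 <= Y is
   canonicalized to X < Y (via X + 0 < Y), reducing the magnitude of
   the constant. */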
9020 static tree
9021 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9022 tree arg0, tree arg1,
9023 bool *strict_overflow_p)
9025 enum tree_code code0 = TREE_CODE (arg0);
9026 tree t, cst0 = NULL_TREE;
9027 int sgn0;
9028 bool swap = false;
9030 /* Match A +- CST code arg1 and CST code arg1. We can change the
9031 first form only if overflow is undefined. */
9032 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9033 /* In principle pointers also have undefined overflow behavior,
9034 but that causes problems elsewhere. */
9035 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9036 && (code0 == MINUS_EXPR
9037 || code0 == PLUS_EXPR)
9038 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9039 || code0 == INTEGER_CST))
9040 return NULL_TREE;
9042 /* Identify the constant in arg0 and its sign. */
9043 if (code0 == INTEGER_CST)
9044 cst0 = arg0;
9045 else
9046 cst0 = TREE_OPERAND (arg0, 1);
9047 sgn0 = tree_int_cst_sgn (cst0);
9049 /* Overflowed constants and zero will cause problems. */
9050 if (integer_zerop (cst0)
9051 || TREE_OVERFLOW (cst0))
9052 return NULL_TREE;
9054 /* See if we can reduce the magnitude of the constant in
9055 arg0 by changing the comparison code. */
9056 if (code0 == INTEGER_CST)
9058 /* CST <= arg1 -> CST-1 < arg1. */
9059 if (code == LE_EXPR && sgn0 == 1)
9060 code = LT_EXPR;
9061 /* -CST < arg1 -> -CST-1 <= arg1. */
9062 else if (code == LT_EXPR && sgn0 == -1)
9063 code = LE_EXPR;
9064 /* CST > arg1 -> CST-1 >= arg1. */
9065 else if (code == GT_EXPR && sgn0 == 1)
9066 code = GE_EXPR;
9067 /* -CST >= arg1 -> -CST-1 > arg1. */
9068 else if (code == GE_EXPR && sgn0 == -1)
9069 code = GT_EXPR;
9070 else
9071 return NULL_TREE;
9072 /* arg1 code' CST' might be more canonical. */
9073 swap = true;
9075 else
9077 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9078 if (code == LT_EXPR
9079 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9080 code = LE_EXPR;
9081 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9082 else if (code == GT_EXPR
9083 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9084 code = GE_EXPR;
9085 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9086 else if (code == LE_EXPR
9087 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9088 code = LT_EXPR;
9089 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9090 else if (code == GE_EXPR
9091 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9092 code = GT_EXPR;
9093 else
9094 return NULL_TREE;
9095 *strict_overflow_p = true;
9098 /* Now build the constant reduced in magnitude. But not if that
9099 would produce one outside of its type's range. */
9100 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9101 && ((sgn0 == 1
9102 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9103 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9104 || (sgn0 == -1
9105 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9106 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9107 /* We cannot swap the comparison here as that would cause us to
9108 endlessly recurse. */
9109 return NULL_TREE;
9111 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9112 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
9113 if (code0 != INTEGER_CST)
9114 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9116 /* If swapping might yield a more canonical form, do so. */
9117 if (swap)
9118 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
9119 else
9120 return fold_build2_loc (loc, code, type, t, arg1);
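/* Illustrative example (added for exposition, not part of the
   original sources).  Assuming signed overflow is undefined, the
   helper above rewrites e.g.

     a - 10 < b   as   a - 9 <= b   (A - CST < arg1 -> A - CST-1 <= arg1)
     2 <= b       as   b > 1        (CST <= arg1 -> CST-1 < arg1, swapped)

   in each case reducing the magnitude of the constant by one.  */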
9123 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9124 overflow further. Try to decrease the magnitude of constants involved
9125 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9126 and put sole constants at the second argument position.
9127 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9129 static tree
9130 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9131 tree arg0, tree arg1)
9133 tree t;
9134 bool strict_overflow_p;
9135 const char * const warnmsg = G_("assuming signed overflow does not occur "
9136 "when reducing constant in comparison");
9138 /* Try canonicalization by simplifying arg0. */
9139 strict_overflow_p = false;
9140 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9141 &strict_overflow_p);
9142 if (t)
9144 if (strict_overflow_p)
9145 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9146 return t;
9149 /* Try canonicalization by simplifying arg1 using the swapped
9150 comparison. */
9151 code = swap_tree_comparison (code);
9152 strict_overflow_p = false;
9153 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9154 &strict_overflow_p);
9155 if (t && strict_overflow_p)
9156 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9157 return t;
9160 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9161 space. This is used to avoid issuing overflow warnings for
9162 expressions like &p->x which cannot wrap. */
9164 static bool
9165 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
9167 unsigned HOST_WIDE_INT offset_low, total_low;
9168 HOST_WIDE_INT size, offset_high, total_high;
9170 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9171 return true;
9173 if (bitpos < 0)
9174 return true;
9176 if (offset == NULL_TREE)
9178 offset_low = 0;
9179 offset_high = 0;
9181 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9182 return true;
9183 else
9185 offset_low = TREE_INT_CST_LOW (offset);
9186 offset_high = TREE_INT_CST_HIGH (offset);
9189 if (add_double_with_sign (offset_low, offset_high,
9190 bitpos / BITS_PER_UNIT, 0,
9191 &total_low, &total_high,
9192 true))
9193 return true;
9195 if (total_high != 0)
9196 return true;
9198 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9199 if (size <= 0)
9200 return true;
9202 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9203 array. */
9204 if (TREE_CODE (base) == ADDR_EXPR)
9206 HOST_WIDE_INT base_size;
9208 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9209 if (base_size > 0 && size < base_size)
9210 size = base_size;
9213 return total_low > (unsigned HOST_WIDE_INT) size;
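/* Illustrative example (added for exposition).  Given

     struct S { int i; } *p;

   the address &p->i has a zero offset and bit position, so the
   function returns false: the address cannot wrap.  An offset at or
   past sizeof (struct S) makes it return true, and fold_comparison
   below then emits the "assuming pointer wraparound does not occur"
   warning before folding.  */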
9216 /* Subroutine of fold_binary. This routine performs all of the
9217 transformations that are common to the equality/inequality
9218 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9219 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9220 fold_binary should call fold_binary. Fold a comparison with
9221 tree code CODE and type TYPE with operands OP0 and OP1. Return
9222 the folded comparison or NULL_TREE. */
9224 static tree
9225 fold_comparison (location_t loc, enum tree_code code, tree type,
9226 tree op0, tree op1)
9228 tree arg0, arg1, tem;
9230 arg0 = op0;
9231 arg1 = op1;
9233 STRIP_SIGN_NOPS (arg0);
9234 STRIP_SIGN_NOPS (arg1);
9236 tem = fold_relational_const (code, type, arg0, arg1);
9237 if (tem != NULL_TREE)
9238 return tem;
9240 /* If one arg is a real or integer constant, put it last. */
9241 if (tree_swap_operands_p (arg0, arg1, true))
9242 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9244 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9245 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9246 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9247 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9248 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9249 && (TREE_CODE (arg1) == INTEGER_CST
9250 && !TREE_OVERFLOW (arg1)))
9252 tree const1 = TREE_OPERAND (arg0, 1);
9253 tree const2 = arg1;
9254 tree variable = TREE_OPERAND (arg0, 0);
9255 tree lhs;
9256 int lhs_add;
9257 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9259 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9260 TREE_TYPE (arg1), const2, const1);
9262 /* If the constant operation overflowed this can be
9263 simplified as a comparison against INT_MAX/INT_MIN. */
9264 if (TREE_CODE (lhs) == INTEGER_CST
9265 && TREE_OVERFLOW (lhs))
9267 int const1_sgn = tree_int_cst_sgn (const1);
9268 enum tree_code code2 = code;
9270 /* Get the sign of the constant on the lhs if the
9271 operation were VARIABLE + CONST1. */
9272 if (TREE_CODE (arg0) == MINUS_EXPR)
9273 const1_sgn = -const1_sgn;
9275 /* The sign of the constant determines if we overflowed
9276 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9277 Canonicalize to the INT_MIN overflow by swapping the comparison
9278 if necessary. */
9279 if (const1_sgn == -1)
9280 code2 = swap_tree_comparison (code);
9282 /* We now can look at the canonicalized case
9283 VARIABLE + 1 CODE2 INT_MIN
9284 and decide on the result. */
9285 if (code2 == LT_EXPR
9286 || code2 == LE_EXPR
9287 || code2 == EQ_EXPR)
9288 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9289 else if (code2 == NE_EXPR
9290 || code2 == GE_EXPR
9291 || code2 == GT_EXPR)
9292 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9295 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9296 && (TREE_CODE (lhs) != INTEGER_CST
9297 || !TREE_OVERFLOW (lhs)))
9299 fold_overflow_warning (("assuming signed overflow does not occur "
9300 "when changing X +- C1 cmp C2 to "
9301 "X cmp C1 +- C2"),
9302 WARN_STRICT_OVERFLOW_COMPARISON);
9303 return fold_build2_loc (loc, code, type, variable, lhs);
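/* Illustrative example (added for exposition).  For signed int x the
   transformation above rewrites

     x + 20 < 30   as   x < 10

   and when computing C2 -+ C1 overflows it decides the comparison
   outright: x - 1 > INT_MAX folds to false, since under undefined
   signed overflow no value of x can satisfy it.  */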
9307 /* For comparisons of pointers we can decompose them to a compile time
9308 comparison of the base objects and the offsets into the object.
9309 This requires at least one operand being an ADDR_EXPR or a
9310 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9311 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9312 && (TREE_CODE (arg0) == ADDR_EXPR
9313 || TREE_CODE (arg1) == ADDR_EXPR
9314 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9315 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9317 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9318 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9319 enum machine_mode mode;
9320 int volatilep, unsignedp;
9321 bool indirect_base0 = false, indirect_base1 = false;
9323 /* Get base and offset for the access. Strip ADDR_EXPR for
9324 get_inner_reference, but put it back by stripping INDIRECT_REF
9325 off the base object if possible. indirect_baseN will be true
9326 if baseN is not an address but refers to the object itself. */
9327 base0 = arg0;
9328 if (TREE_CODE (arg0) == ADDR_EXPR)
9330 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9331 &bitsize, &bitpos0, &offset0, &mode,
9332 &unsignedp, &volatilep, false);
9333 if (TREE_CODE (base0) == INDIRECT_REF)
9334 base0 = TREE_OPERAND (base0, 0);
9335 else
9336 indirect_base0 = true;
9338 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9340 base0 = TREE_OPERAND (arg0, 0);
9341 offset0 = TREE_OPERAND (arg0, 1);
9344 base1 = arg1;
9345 if (TREE_CODE (arg1) == ADDR_EXPR)
9347 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9348 &bitsize, &bitpos1, &offset1, &mode,
9349 &unsignedp, &volatilep, false);
9350 if (TREE_CODE (base1) == INDIRECT_REF)
9351 base1 = TREE_OPERAND (base1, 0);
9352 else
9353 indirect_base1 = true;
9355 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9357 base1 = TREE_OPERAND (arg1, 0);
9358 offset1 = TREE_OPERAND (arg1, 1);
9361 /* If we have equivalent bases we might be able to simplify. */
9362 if (indirect_base0 == indirect_base1
9363 && operand_equal_p (base0, base1, 0))
9365 /* We can fold this expression to a constant if the non-constant
9366 offset parts are equal. */
9367 if ((offset0 == offset1
9368 || (offset0 && offset1
9369 && operand_equal_p (offset0, offset1, 0)))
9370 && (code == EQ_EXPR
9371 || code == NE_EXPR
9372 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9375 if (code != EQ_EXPR
9376 && code != NE_EXPR
9377 && bitpos0 != bitpos1
9378 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9379 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9380 fold_overflow_warning (("assuming pointer wraparound does not "
9381 "occur when comparing P +- C1 with "
9382 "P +- C2"),
9383 WARN_STRICT_OVERFLOW_CONDITIONAL);
9385 switch (code)
9387 case EQ_EXPR:
9388 return constant_boolean_node (bitpos0 == bitpos1, type);
9389 case NE_EXPR:
9390 return constant_boolean_node (bitpos0 != bitpos1, type);
9391 case LT_EXPR:
9392 return constant_boolean_node (bitpos0 < bitpos1, type);
9393 case LE_EXPR:
9394 return constant_boolean_node (bitpos0 <= bitpos1, type);
9395 case GE_EXPR:
9396 return constant_boolean_node (bitpos0 >= bitpos1, type);
9397 case GT_EXPR:
9398 return constant_boolean_node (bitpos0 > bitpos1, type);
9399 default:;
9402 /* We can simplify the comparison to a comparison of the variable
9403 offset parts if the constant offset parts are equal.
9404 Be careful to use signed size type here because otherwise we
9405 mess with array offsets in the wrong way. This is possible
9406 because pointer arithmetic is restricted to remain within an
9407 object and overflow on pointer differences is undefined as of
9408 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9409 else if (bitpos0 == bitpos1
9410 && ((code == EQ_EXPR || code == NE_EXPR)
9411 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9413 tree signed_size_type_node;
9414 signed_size_type_node = signed_type_for (size_type_node);
9416 /* By converting to signed size type we cover middle-end pointer
9417 arithmetic, which operates on unsigned pointer types of size
9418 type width, and ARRAY_REF offsets, which are properly sign or
9419 zero extended from their type in case it is narrower than
9420 size type. */
9421 if (offset0 == NULL_TREE)
9422 offset0 = build_int_cst (signed_size_type_node, 0);
9423 else
9424 offset0 = fold_convert_loc (loc, signed_size_type_node,
9425 offset0);
9426 if (offset1 == NULL_TREE)
9427 offset1 = build_int_cst (signed_size_type_node, 0);
9428 else
9429 offset1 = fold_convert_loc (loc, signed_size_type_node,
9430 offset1);
9432 if (code != EQ_EXPR
9433 && code != NE_EXPR
9434 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9435 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9436 fold_overflow_warning (("assuming pointer wraparound does not "
9437 "occur when comparing P +- C1 with "
9438 "P +- C2"),
9439 WARN_STRICT_OVERFLOW_COMPARISON);
9441 return fold_build2_loc (loc, code, type, offset0, offset1);
9444 /* For non-equal bases we can simplify if they are addresses
9445 of local binding decls or constants. */
9446 else if (indirect_base0 && indirect_base1
9447 /* We know that !operand_equal_p (base0, base1, 0)
9448 because the if condition was false. But make
9449 sure two decls are not the same. */
9450 && base0 != base1
9451 && TREE_CODE (arg0) == ADDR_EXPR
9452 && TREE_CODE (arg1) == ADDR_EXPR
9453 && (((TREE_CODE (base0) == VAR_DECL
9454 || TREE_CODE (base0) == PARM_DECL)
9455 && (targetm.binds_local_p (base0)
9456 || CONSTANT_CLASS_P (base1)))
9457 || CONSTANT_CLASS_P (base0))
9458 && (((TREE_CODE (base1) == VAR_DECL
9459 || TREE_CODE (base1) == PARM_DECL)
9460 && (targetm.binds_local_p (base1)
9461 || CONSTANT_CLASS_P (base0)))
9462 || CONSTANT_CLASS_P (base1)))
9464 if (code == EQ_EXPR)
9465 return omit_two_operands_loc (loc, type, boolean_false_node,
9466 arg0, arg1);
9467 else if (code == NE_EXPR)
9468 return omit_two_operands_loc (loc, type, boolean_true_node,
9469 arg0, arg1);
9471 /* For equal offsets we can simplify to a comparison of the
9472 base addresses. */
9473 else if (bitpos0 == bitpos1
9474 && (indirect_base0
9475 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9476 && (indirect_base1
9477 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9478 && ((offset0 == offset1)
9479 || (offset0 && offset1
9480 && operand_equal_p (offset0, offset1, 0))))
9482 if (indirect_base0)
9483 base0 = build_fold_addr_expr_loc (loc, base0);
9484 if (indirect_base1)
9485 base1 = build_fold_addr_expr_loc (loc, base1);
9486 return fold_build2_loc (loc, code, type, base0, base1);
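/* Illustrative example (added for exposition).  Given

     int a[10];

   both operands of &a[1] < &a[3] decompose to the common base `a'
   with bit positions 32 and 96 (byte offsets 4 and 12), so the
   comparison folds to true at compile time, and &a[1] == &a[3]
   folds to false.  */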
9490 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9491 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9492 the resulting offset is smaller in absolute value than the
9493 original one. */
9494 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9495 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9496 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9497 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9498 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9499 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9500 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9502 tree const1 = TREE_OPERAND (arg0, 1);
9503 tree const2 = TREE_OPERAND (arg1, 1);
9504 tree variable1 = TREE_OPERAND (arg0, 0);
9505 tree variable2 = TREE_OPERAND (arg1, 0);
9506 tree cst;
9507 const char * const warnmsg = G_("assuming signed overflow does not "
9508 "occur when combining constants around "
9509 "a comparison");
9511 /* Put the constant on the side where it doesn't overflow and is
9512 of lower absolute value than before. */
9513 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9514 ? MINUS_EXPR : PLUS_EXPR,
9515 const2, const1, 0);
9516 if (!TREE_OVERFLOW (cst)
9517 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9519 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9520 return fold_build2_loc (loc, code, type,
9521 variable1,
9522 fold_build2_loc (loc,
9523 TREE_CODE (arg1), TREE_TYPE (arg1),
9524 variable2, cst));
9527 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9528 ? MINUS_EXPR : PLUS_EXPR,
9529 const1, const2, 0);
9530 if (!TREE_OVERFLOW (cst)
9531 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9533 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9534 return fold_build2_loc (loc, code, type,
9535 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9536 variable1, cst),
9537 variable2);
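/* Illustrative example (added for exposition).  For signed x and y
   the transformation above rewrites

     x + 100 < y + 50   as   x < y + -50

   i.e. x < y - 50, combining the two constants on the side where
   the combined constant does not overflow.  */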
9541 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9542 signed arithmetic case. That form is created by the compiler
9543 often enough for folding it to be of value. One example is in
9544 computing loop trip counts after Operator Strength Reduction. */
9545 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9546 && TREE_CODE (arg0) == MULT_EXPR
9547 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9548 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9549 && integer_zerop (arg1))
9551 tree const1 = TREE_OPERAND (arg0, 1);
9552 tree const2 = arg1; /* zero */
9553 tree variable1 = TREE_OPERAND (arg0, 0);
9554 enum tree_code cmp_code = code;
9556 gcc_assert (!integer_zerop (const1));
9558 fold_overflow_warning (("assuming signed overflow does not occur when "
9559 "eliminating multiplication in comparison "
9560 "with zero"),
9561 WARN_STRICT_OVERFLOW_COMPARISON);
9563 /* If const1 is negative we swap the sense of the comparison. */
9564 if (tree_int_cst_sgn (const1) < 0)
9565 cmp_code = swap_tree_comparison (cmp_code);
9567 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
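/* Illustrative example (added for exposition).  For signed x the
   fold above rewrites

     x * 4 > 0    as   x > 0
     x * -2 > 0   as   x < 0

   since with undefined overflow the sign of x * C1 follows from the
   signs of x and C1 alone.  */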
9570 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
9571 if (tem)
9572 return tem;
9574 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9576 tree targ0 = strip_float_extensions (arg0);
9577 tree targ1 = strip_float_extensions (arg1);
9578 tree newtype = TREE_TYPE (targ0);
9580 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9581 newtype = TREE_TYPE (targ1);
9583 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9584 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9585 return fold_build2_loc (loc, code, type,
9586 fold_convert_loc (loc, newtype, targ0),
9587 fold_convert_loc (loc, newtype, targ1));
9589 /* (-a) CMP (-b) -> b CMP a */
9590 if (TREE_CODE (arg0) == NEGATE_EXPR
9591 && TREE_CODE (arg1) == NEGATE_EXPR)
9592 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9593 TREE_OPERAND (arg0, 0));
9595 if (TREE_CODE (arg1) == REAL_CST)
9597 REAL_VALUE_TYPE cst;
9598 cst = TREE_REAL_CST (arg1);
9600 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9601 if (TREE_CODE (arg0) == NEGATE_EXPR)
9602 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9603 TREE_OPERAND (arg0, 0),
9604 build_real (TREE_TYPE (arg1),
9605 REAL_VALUE_NEGATE (cst)));
9607 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9608 /* a CMP (-0) -> a CMP 0 */
9609 if (REAL_VALUE_MINUS_ZERO (cst))
9610 return fold_build2_loc (loc, code, type, arg0,
9611 build_real (TREE_TYPE (arg1), dconst0));
9613 /* x != NaN is always true, other ops are always false. */
9614 if (REAL_VALUE_ISNAN (cst)
9615 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9617 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9618 return omit_one_operand_loc (loc, type, tem, arg0);
9621 /* Fold comparisons against infinity. */
9622 if (REAL_VALUE_ISINF (cst)
9623 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9625 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9626 if (tem != NULL_TREE)
9627 return tem;
9631 /* If this is a comparison of a real constant with a PLUS_EXPR
9632 or a MINUS_EXPR of a real constant, we can convert it into a
9633 comparison with a revised real constant, provided that no
9634 overflow occurs and unsafe_math_optimizations are enabled. */
9635 if (flag_unsafe_math_optimizations
9636 && TREE_CODE (arg1) == REAL_CST
9637 && (TREE_CODE (arg0) == PLUS_EXPR
9638 || TREE_CODE (arg0) == MINUS_EXPR)
9639 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9640 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9641 ? MINUS_EXPR : PLUS_EXPR,
9642 arg1, TREE_OPERAND (arg0, 1), 0))
9643 && !TREE_OVERFLOW (tem))
9644 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9646 /* Likewise, we can simplify a comparison of a real constant with
9647 a MINUS_EXPR whose first operand is also a real constant, i.e.
9648 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9649 floating-point types only if -fassociative-math is set. */
9650 if (flag_associative_math
9651 && TREE_CODE (arg1) == REAL_CST
9652 && TREE_CODE (arg0) == MINUS_EXPR
9653 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9654 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9655 arg1, 0))
9656 && !TREE_OVERFLOW (tem))
9657 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9658 TREE_OPERAND (arg0, 1), tem);
9660 /* Fold comparisons against built-in math functions. */
9661 if (TREE_CODE (arg1) == REAL_CST
9662 && flag_unsafe_math_optimizations
9663 && ! flag_errno_math)
9665 enum built_in_function fcode = builtin_mathfn_code (arg0);
9667 if (fcode != END_BUILTINS)
9669 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9670 if (tem != NULL_TREE)
9671 return tem;
9676 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9677 && CONVERT_EXPR_P (arg0))
9679 /* If we are widening one operand of an integer comparison,
9680 see if the other operand is similarly being widened. Perhaps we
9681 can do the comparison in the narrower type. */
9682 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9683 if (tem)
9684 return tem;
9686 /* Or if we are changing signedness. */
9687 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9688 if (tem)
9689 return tem;
9692 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9693 constant, we can simplify it. */
9694 if (TREE_CODE (arg1) == INTEGER_CST
9695 && (TREE_CODE (arg0) == MIN_EXPR
9696 || TREE_CODE (arg0) == MAX_EXPR)
9697 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9699 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9700 if (tem)
9701 return tem;
9704 /* Simplify comparison of something with itself. (For IEEE
9705 floating-point, we can only do some of these simplifications.) */
9706 if (operand_equal_p (arg0, arg1, 0))
9708 switch (code)
9710 case EQ_EXPR:
9711 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9712 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9713 return constant_boolean_node (1, type);
9714 break;
9716 case GE_EXPR:
9717 case LE_EXPR:
9718 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9719 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9720 return constant_boolean_node (1, type);
9721 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9723 case NE_EXPR:
9724 /* For NE, we can only do this simplification if the operands
9725 are integer or we don't honor IEEE floating point NaNs. */
9726 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9727 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9728 break;
9729 /* ... fall through ... */
9730 case GT_EXPR:
9731 case LT_EXPR:
9732 return constant_boolean_node (0, type);
9733 default:
9734 gcc_unreachable ();
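/* Illustrative example (added for exposition).  For an IEEE double d
   this switch folds d >= d to d == d (false only for NaN) and d < d
   to false, while for an integer i it folds i == i, i >= i and
   i <= i to true outright.  */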
9738 /* If we are comparing an expression that just has comparisons
9739 of two integer values, arithmetic expressions of those comparisons,
9740 and constants, we can simplify it. There are only three cases
9741 to check: the two values can either be equal, the first can be
9742 greater, or the second can be greater. Fold the expression for
9743 those three values. Since each value must be 0 or 1, we have
9744 eight possibilities, each of which corresponds to the constant 0
9745 or 1 or one of the six possible comparisons.
9747 This handles common cases like (a > b) == 0 but also handles
9748 expressions like ((x > y) - (y > x)) > 0, which supposedly
9749 occur in macroized code. */
9751 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9753 tree cval1 = 0, cval2 = 0;
9754 int save_p = 0;
9756 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9757 /* Don't handle degenerate cases here; they should already
9758 have been handled anyway. */
9759 && cval1 != 0 && cval2 != 0
9760 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9761 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9762 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9763 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9764 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9765 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9766 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9768 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9769 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9771 /* We can't just pass T to eval_subst in case cval1 or cval2
9772 was the same as ARG1. */
9774 tree high_result
9775 = fold_build2_loc (loc, code, type,
9776 eval_subst (loc, arg0, cval1, maxval,
9777 cval2, minval),
9778 arg1);
9779 tree equal_result
9780 = fold_build2_loc (loc, code, type,
9781 eval_subst (loc, arg0, cval1, maxval,
9782 cval2, maxval),
9783 arg1);
9784 tree low_result
9785 = fold_build2_loc (loc, code, type,
9786 eval_subst (loc, arg0, cval1, minval,
9787 cval2, maxval),
9788 arg1);
9790 /* All three of these results should be 0 or 1. Confirm they are.
9791 Then use those values to select the proper code to use. */
9793 if (TREE_CODE (high_result) == INTEGER_CST
9794 && TREE_CODE (equal_result) == INTEGER_CST
9795 && TREE_CODE (low_result) == INTEGER_CST)
9797 /* Make a 3-bit mask with the high-order bit being the
9798 value for `>', the next for `=', and the low for `<'. */
9799 switch ((integer_onep (high_result) * 4)
9800 + (integer_onep (equal_result) * 2)
9801 + integer_onep (low_result))
9803 case 0:
9804 /* Always false. */
9805 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9806 case 1:
9807 code = LT_EXPR;
9808 break;
9809 case 2:
9810 code = EQ_EXPR;
9811 break;
9812 case 3:
9813 code = LE_EXPR;
9814 break;
9815 case 4:
9816 code = GT_EXPR;
9817 break;
9818 case 5:
9819 code = NE_EXPR;
9820 break;
9821 case 6:
9822 code = GE_EXPR;
9823 break;
9824 case 7:
9825 /* Always true. */
9826 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9829 if (save_p)
9831 tem = save_expr (build2 (code, type, cval1, cval2));
9832 SET_EXPR_LOCATION (tem, loc);
9833 return tem;
9835 return fold_build2_loc (loc, code, type, cval1, cval2);
9840 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9841 into a single range test. */
9842 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9843 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9844 && TREE_CODE (arg1) == INTEGER_CST
9845 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9846 && !integer_zerop (TREE_OPERAND (arg0, 1))
9847 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9848 && !TREE_OVERFLOW (arg1))
9850 tem = fold_div_compare (loc, code, type, arg0, arg1);
9851 if (tem != NULL_TREE)
9852 return tem;
9855 /* Fold ~X op ~Y as Y op X. */
9856 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9857 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9859 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9860 return fold_build2_loc (loc, code, type,
9861 fold_convert_loc (loc, cmp_type,
9862 TREE_OPERAND (arg1, 0)),
9863 TREE_OPERAND (arg0, 0));
9866 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9867 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9868 && TREE_CODE (arg1) == INTEGER_CST)
9870 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9871 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9872 TREE_OPERAND (arg0, 0),
9873 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9874 fold_convert_loc (loc, cmp_type, arg1)));
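/* Illustrative example (added for exposition).  Because ~x = -x - 1
   is strictly decreasing, ~a < ~b holds exactly when b < a, and the
   constant form rewrites ~a < 3 as a > ~3, i.e. a > -4.  */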
9877 return NULL_TREE;
9881 /* Subroutine of fold_binary. Optimize complex multiplications of the
9882 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9883 argument EXPR represents the expression "z" of type TYPE. */
9885 static tree
9886 fold_mult_zconjz (location_t loc, tree type, tree expr)
9888 tree itype = TREE_TYPE (type);
9889 tree rpart, ipart, tem;
9891 if (TREE_CODE (expr) == COMPLEX_EXPR)
9893 rpart = TREE_OPERAND (expr, 0);
9894 ipart = TREE_OPERAND (expr, 1);
9896 else if (TREE_CODE (expr) == COMPLEX_CST)
9898 rpart = TREE_REALPART (expr);
9899 ipart = TREE_IMAGPART (expr);
9901 else
9903 expr = save_expr (expr);
9904 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9905 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9908 rpart = save_expr (rpart);
9909 ipart = save_expr (ipart);
9910 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9911 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9912 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9913 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9914 fold_convert_loc (loc, itype, integer_zero_node));
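/* Illustrative example (added for exposition).  For z = a + b*i the
   identity used above is

     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i

   so the result is built as COMPLEX_EXPR <a*a + b*b, 0>.  */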
9918 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9919 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9920 guarantees that P and N have the same least significant log2(M) bits.
9921 N is not otherwise constrained. In particular, N is not normalized to
9922 0 <= N < M as is common. In general, the precise value of P is unknown.
9923 M is chosen as large as possible such that constant N can be determined.
9925 Returns M and sets *RESIDUE to N.
9927 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9928 account. This is not always possible due to PR 35705.
9931 static unsigned HOST_WIDE_INT
9932 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9933 bool allow_func_align)
9935 enum tree_code code;
9937 *residue = 0;
9939 code = TREE_CODE (expr);
9940 if (code == ADDR_EXPR)
9942 expr = TREE_OPERAND (expr, 0);
9943 if (handled_component_p (expr))
9945 HOST_WIDE_INT bitsize, bitpos;
9946 tree offset;
9947 enum machine_mode mode;
9948 int unsignedp, volatilep;
9950 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9951 &mode, &unsignedp, &volatilep, false);
9952 *residue = bitpos / BITS_PER_UNIT;
9953 if (offset)
9955 if (TREE_CODE (offset) == INTEGER_CST)
9956 *residue += TREE_INT_CST_LOW (offset);
9957 else
9958 /* We don't handle more complicated offset expressions. */
9959 return 1;
9963 if (DECL_P (expr)
9964 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9965 return DECL_ALIGN_UNIT (expr);
9967 else if (code == POINTER_PLUS_EXPR)
9969 tree op0, op1;
9970 unsigned HOST_WIDE_INT modulus;
9971 enum tree_code inner_code;
9973 op0 = TREE_OPERAND (expr, 0);
9974 STRIP_NOPS (op0);
9975 modulus = get_pointer_modulus_and_residue (op0, residue,
9976 allow_func_align);
9978 op1 = TREE_OPERAND (expr, 1);
9979 STRIP_NOPS (op1);
9980 inner_code = TREE_CODE (op1);
9981 if (inner_code == INTEGER_CST)
9983 *residue += TREE_INT_CST_LOW (op1);
9984 return modulus;
9986 else if (inner_code == MULT_EXPR)
9988 op1 = TREE_OPERAND (op1, 1);
9989 if (TREE_CODE (op1) == INTEGER_CST)
9991 unsigned HOST_WIDE_INT align;
9993 /* Compute the greatest power-of-2 divisor of op1. */
9994 align = TREE_INT_CST_LOW (op1);
9995 align &= -align;
9997 /* If align is non-zero and less than modulus, replace
9998 modulus with align. If align is 0, then either op1 is 0
9999 or the greatest power-of-2 divisor of op1 doesn't fit in an
10000 unsigned HOST_WIDE_INT. In either case, no additional
10001 constraint is imposed. */
10002 if (align)
10003 modulus = MIN (modulus, align);
10005 return modulus;
10010 /* If we get here, we were unable to determine anything useful about the
10011 expression. */
10012 return 1;
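/* Illustrative example (added for exposition).  Given

     static int a[8] __attribute__ ((aligned (16)));

   the expression &a[1] yields modulus 16 and residue 4: the pointer
   value is known to be congruent to 4 modulo 16, so its low four
   bits are known exactly.  */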
10016 /* Fold a binary expression of code CODE and type TYPE with operands
10017 OP0 and OP1. LOC is the location of the resulting expression.
10018 Return the folded expression if folding is successful. Otherwise,
10019 return NULL_TREE. */
10021 tree
10022 fold_binary_loc (location_t loc,
10023 enum tree_code code, tree type, tree op0, tree op1)
10025 enum tree_code_class kind = TREE_CODE_CLASS (code);
10026 tree arg0, arg1, tem;
10027 tree t1 = NULL_TREE;
10028 bool strict_overflow_p;
10030 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10031 && TREE_CODE_LENGTH (code) == 2
10032 && op0 != NULL_TREE
10033 && op1 != NULL_TREE);
10035 arg0 = op0;
10036 arg1 = op1;
10038 /* Strip any conversions that don't change the mode. This is
10039 safe for every expression, except for a comparison expression
10040 because its signedness is derived from its operands. So, in
10041 the latter case, only strip conversions that don't change the
10042 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10043 preserved.
10045 Note that this is done as an internal manipulation within the
10046 constant folder, in order to find the simplest representation
10047 of the arguments so that their form can be studied. In any
10048 cases, the appropriate type conversions should be put back in
10049 the tree that will get out of the constant folder. */
10051 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10053 STRIP_SIGN_NOPS (arg0);
10054 STRIP_SIGN_NOPS (arg1);
10056 else
10058 STRIP_NOPS (arg0);
10059 STRIP_NOPS (arg1);
10062 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10063 constant but we can't do arithmetic on them. */
10064 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10065 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10066 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10067 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10068 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10069 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
10071 if (kind == tcc_binary)
10073 /* Make sure type and arg0 have the same saturating flag. */
10074 gcc_assert (TYPE_SATURATING (type)
10075 == TYPE_SATURATING (TREE_TYPE (arg0)));
10076 tem = const_binop (code, arg0, arg1, 0);
10078 else if (kind == tcc_comparison)
10079 tem = fold_relational_const (code, type, arg0, arg1);
10080 else
10081 tem = NULL_TREE;
10083 if (tem != NULL_TREE)
10085 if (TREE_TYPE (tem) != type)
10086 tem = fold_convert_loc (loc, type, tem);
10087 return tem;
10091 /* If this is a commutative operation, and ARG0 is a constant, move it
10092 to ARG1 to reduce the number of tests below. */
10093 if (commutative_tree_code (code)
10094 && tree_swap_operands_p (arg0, arg1, true))
10095 return fold_build2_loc (loc, code, type, op1, op0);
10097 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10099 First check for cases where an arithmetic operation is applied to a
10100 compound, conditional, or comparison operation. Push the arithmetic
10101 operation inside the compound or conditional to see if any folding
10102 can then be done. Convert comparison to conditional for this purpose.
10103 This also optimizes non-constant cases that used to be done in
10104 expand_expr.
10106 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10107 one of the operands is a comparison and the other is a comparison, a
10108 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10109 code below would make the expression more complex. Change it to a
10110 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10111 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10113 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10114 || code == EQ_EXPR || code == NE_EXPR)
10115 && ((truth_value_p (TREE_CODE (arg0))
10116 && (truth_value_p (TREE_CODE (arg1))
10117 || (TREE_CODE (arg1) == BIT_AND_EXPR
10118 && integer_onep (TREE_OPERAND (arg1, 1)))))
10119 || (truth_value_p (TREE_CODE (arg1))
10120 && (truth_value_p (TREE_CODE (arg0))
10121 || (TREE_CODE (arg0) == BIT_AND_EXPR
10122 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10124 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10125 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10126 : TRUTH_XOR_EXPR,
10127 boolean_type_node,
10128 fold_convert_loc (loc, boolean_type_node, arg0),
10129 fold_convert_loc (loc, boolean_type_node, arg1));
10131 if (code == EQ_EXPR)
10132 tem = invert_truthvalue_loc (loc, tem);
10134 return fold_convert_loc (loc, type, tem);
10137 if (TREE_CODE_CLASS (code) == tcc_binary
10138 || TREE_CODE_CLASS (code) == tcc_comparison)
10140 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10142 tem = fold_build2_loc (loc, code, type,
10143 fold_convert_loc (loc, TREE_TYPE (op0),
10144 TREE_OPERAND (arg0, 1)), op1);
10145 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10146 goto fold_binary_exit;
10148 if (TREE_CODE (arg1) == COMPOUND_EXPR
10149 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10151 tem = fold_build2_loc (loc, code, type, op0,
10152 fold_convert_loc (loc, TREE_TYPE (op1),
10153 TREE_OPERAND (arg1, 1)));
10154 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10155 goto fold_binary_exit;
10158 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10160 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10161 arg0, arg1,
10162 /*cond_first_p=*/1);
10163 if (tem != NULL_TREE)
10164 return tem;
10167 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10169 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10170 arg1, arg0,
10171 /*cond_first_p=*/0);
10172 if (tem != NULL_TREE)
10173 return tem;
10177 switch (code)
10179 case POINTER_PLUS_EXPR:
10180 /* 0 +p index -> (type)index */
10181 if (integer_zerop (arg0))
10182 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10184 /* PTR +p 0 -> PTR */
10185 if (integer_zerop (arg1))
10186 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10188 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10189 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10190 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10191 return fold_convert_loc (loc, type,
10192 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10193 fold_convert_loc (loc, sizetype,
10194 arg1),
10195 fold_convert_loc (loc, sizetype,
10196 arg0)));
10198 /* index +p PTR -> PTR +p index */
10199 if (POINTER_TYPE_P (TREE_TYPE (arg1))
10200 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10201 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10202 fold_convert_loc (loc, type, arg1),
10203 fold_convert_loc (loc, sizetype, arg0));
10205 /* (PTR +p B) +p A -> PTR +p (B + A) */
10206 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10208 tree inner;
10209 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10210 tree arg00 = TREE_OPERAND (arg0, 0);
10211 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10212 arg01, fold_convert_loc (loc, sizetype, arg1));
10213 return fold_convert_loc (loc, type,
10214 fold_build2_loc (loc, POINTER_PLUS_EXPR,
10215 TREE_TYPE (arg00),
10216 arg00, inner));
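/* Illustrative example (added for exposition).  The reassociation
   above turns (p p+ 8) p+ 4 into p p+ 12, accumulating both byte
   offsets in sizetype.  */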
10219 /* PTR_CST +p CST -> CST1 */
10220 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10221 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10222 fold_convert_loc (loc, type, arg1));
10224 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the
10225 step of the array. The loop optimizer sometimes produces this
10226 type of expression. */
10227 if (TREE_CODE (arg0) == ADDR_EXPR)
10229 tem = try_move_mult_to_index (loc, arg0,
10230 fold_convert_loc (loc, sizetype, arg1));
10231 if (tem)
10232 return fold_convert_loc (loc, type, tem);
10235 return NULL_TREE;
10237 case PLUS_EXPR:
10238 /* A + (-B) -> A - B */
10239 if (TREE_CODE (arg1) == NEGATE_EXPR)
10240 return fold_build2_loc (loc, MINUS_EXPR, type,
10241 fold_convert_loc (loc, type, arg0),
10242 fold_convert_loc (loc, type,
10243 TREE_OPERAND (arg1, 0)));
10244 /* (-A) + B -> B - A */
10245 if (TREE_CODE (arg0) == NEGATE_EXPR
10246 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10247 return fold_build2_loc (loc, MINUS_EXPR, type,
10248 fold_convert_loc (loc, type, arg1),
10249 fold_convert_loc (loc, type,
10250 TREE_OPERAND (arg0, 0)));
10252 if (INTEGRAL_TYPE_P (type))
10254 /* Convert ~A + 1 to -A. */
10255 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10256 && integer_onep (arg1))
10257 return fold_build1_loc (loc, NEGATE_EXPR, type,
10258 fold_convert_loc (loc, type,
10259 TREE_OPERAND (arg0, 0)));
10261 /* ~X + X is -1. */
10262 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10263 && !TYPE_OVERFLOW_TRAPS (type))
10265 tree tem = TREE_OPERAND (arg0, 0);
10267 STRIP_NOPS (tem);
10268 if (operand_equal_p (tem, arg1, 0))
10270 t1 = build_int_cst_type (type, -1);
10271 return omit_one_operand_loc (loc, type, t1, arg1);
10275 /* X + ~X is -1. */
10276 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10277 && !TYPE_OVERFLOW_TRAPS (type))
10279 tree tem = TREE_OPERAND (arg1, 0);
10281 STRIP_NOPS (tem);
10282 if (operand_equal_p (arg0, tem, 0))
10284 t1 = build_int_cst_type (type, -1);
10285 return omit_one_operand_loc (loc, type, t1, arg0);
10289 /* X + (X / CST) * -CST is X % CST. */
10290 if (TREE_CODE (arg1) == MULT_EXPR
10291 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10292 && operand_equal_p (arg0,
10293 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10295 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10296 tree cst1 = TREE_OPERAND (arg1, 1);
10297 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10298 cst1, cst0);
10299 if (sum && integer_zerop (sum))
10300 return fold_convert_loc (loc, type,
10301 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10302 TREE_TYPE (arg0), arg0,
10303 cst0));
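/* Illustrative example (added for exposition).  The fold above turns

     x + (x / 16) * -16   into   x % 16

   because x - (x / 16) * 16 is exactly the truncating remainder.  */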
10307 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10308 same or one. Make sure type is not saturating.
10309 fold_plusminus_mult_expr will re-associate. */
10310 if ((TREE_CODE (arg0) == MULT_EXPR
10311 || TREE_CODE (arg1) == MULT_EXPR)
10312 && !TYPE_SATURATING (type)
10313 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10315 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10316 if (tem)
10317 return tem;
10320 if (! FLOAT_TYPE_P (type))
10322 if (integer_zerop (arg1))
10323 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10325 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10326 with a constant, and the two constants have no bits in common,
10327 we should treat this as a BIT_IOR_EXPR since this may produce more
10328 simplifications. */
10329 if (TREE_CODE (arg0) == BIT_AND_EXPR
10330 && TREE_CODE (arg1) == BIT_AND_EXPR
10331 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10332 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10333 && integer_zerop (const_binop (BIT_AND_EXPR,
10334 TREE_OPERAND (arg0, 1),
10335 TREE_OPERAND (arg1, 1), 0)))
10337 code = BIT_IOR_EXPR;
10338 goto bit_ior;
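/* Illustrative example (added for exposition).  Disjoint masks mean
   the addition can never carry, so e.g.

     (x & 0x0f) + (y & 0xf0)   equals   (x & 0x0f) | (y & 0xf0)

   and the BIT_IOR_EXPR form exposes further simplifications.  */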
10341 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10342 (plus (plus (mult) (mult)) (foo)) so that we can
10343 take advantage of the factoring cases below. */
10344 if (((TREE_CODE (arg0) == PLUS_EXPR
10345 || TREE_CODE (arg0) == MINUS_EXPR)
10346 && TREE_CODE (arg1) == MULT_EXPR)
10347 || ((TREE_CODE (arg1) == PLUS_EXPR
10348 || TREE_CODE (arg1) == MINUS_EXPR)
10349 && TREE_CODE (arg0) == MULT_EXPR))
10351 tree parg0, parg1, parg, marg;
10352 enum tree_code pcode;
10354 if (TREE_CODE (arg1) == MULT_EXPR)
10355 parg = arg0, marg = arg1;
10356 else
10357 parg = arg1, marg = arg0;
10358 pcode = TREE_CODE (parg);
10359 parg0 = TREE_OPERAND (parg, 0);
10360 parg1 = TREE_OPERAND (parg, 1);
10361 STRIP_NOPS (parg0);
10362 STRIP_NOPS (parg1);
10364 if (TREE_CODE (parg0) == MULT_EXPR
10365 && TREE_CODE (parg1) != MULT_EXPR)
10366 return fold_build2_loc (loc, pcode, type,
10367 fold_build2_loc (loc, PLUS_EXPR, type,
10368 fold_convert_loc (loc, type,
10369 parg0),
10370 fold_convert_loc (loc, type,
10371 marg)),
10372 fold_convert_loc (loc, type, parg1));
10373 if (TREE_CODE (parg0) != MULT_EXPR
10374 && TREE_CODE (parg1) == MULT_EXPR)
10375 return
10376 fold_build2_loc (loc, PLUS_EXPR, type,
10377 fold_convert_loc (loc, type, parg0),
10378 fold_build2_loc (loc, pcode, type,
10379 fold_convert_loc (loc, type, marg),
10380 fold_convert_loc (loc, type,
10381 parg1)));
10384 else
10386 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10387 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10388 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10390 /* Likewise if the operands are reversed. */
10391 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10392 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10394 /* Convert X + -C into X - C. */
10395 if (TREE_CODE (arg1) == REAL_CST
10396 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10398 tem = fold_negate_const (arg1, type);
10399 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10400 return fold_build2_loc (loc, MINUS_EXPR, type,
10401 fold_convert_loc (loc, type, arg0),
10402 fold_convert_loc (loc, type, tem));
10405 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10406 to __complex__ ( x, y ). This is not the same for SNaNs or
10407 if signed zeros are involved. */
10408 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10409 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10410 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10412 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10413 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10414 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10415 bool arg0rz = false, arg0iz = false;
10416 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10417 || (arg0i && (arg0iz = real_zerop (arg0i))))
10419 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10420 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10421 if (arg0rz && arg1i && real_zerop (arg1i))
10423 tree rp = arg1r ? arg1r
10424 : build1 (REALPART_EXPR, rtype, arg1);
10425 tree ip = arg0i ? arg0i
10426 : build1 (IMAGPART_EXPR, rtype, arg0);
10427 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10429 else if (arg0iz && arg1r && real_zerop (arg1r))
10431 tree rp = arg0r ? arg0r
10432 : build1 (REALPART_EXPR, rtype, arg0);
10433 tree ip = arg1i ? arg1i
10434 : build1 (IMAGPART_EXPR, rtype, arg1);
10435 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
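/* Illustrative example (added for exposition).  The fold above uses

     (x + 0i) + (0 + yi) == x + yi

   which fails for signaling NaNs and for signed zeros: if y is -0.,
   the true imaginary part 0. + -0. is +0., but the folded form keeps
   the -0.  */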
10440 if (flag_unsafe_math_optimizations
10441 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10442 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10443 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10444 return tem;
10446 /* Convert x+x into x*2.0. */
10447 if (operand_equal_p (arg0, arg1, 0)
10448 && SCALAR_FLOAT_TYPE_P (type))
10449 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10450 build_real (type, dconst2));
10452 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10453 We associate floats only if the user has specified
10454 -fassociative-math. */
10455 if (flag_associative_math
10456 && TREE_CODE (arg1) == PLUS_EXPR
10457 && TREE_CODE (arg0) != MULT_EXPR)
10459 tree tree10 = TREE_OPERAND (arg1, 0);
10460 tree tree11 = TREE_OPERAND (arg1, 1);
10461 if (TREE_CODE (tree11) == MULT_EXPR
10462 && TREE_CODE (tree10) == MULT_EXPR)
10464 tree tree0;
10465 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10466 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10469 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10470 We associate floats only if the user has specified
10471 -fassociative-math. */
10472 if (flag_associative_math
10473 && TREE_CODE (arg0) == PLUS_EXPR
10474 && TREE_CODE (arg1) != MULT_EXPR)
10476 tree tree00 = TREE_OPERAND (arg0, 0);
10477 tree tree01 = TREE_OPERAND (arg0, 1);
10478 if (TREE_CODE (tree01) == MULT_EXPR
10479 && TREE_CODE (tree00) == MULT_EXPR)
10481 tree tree0;
10482 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10483 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10488 bit_rotate:
10489 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10490 is a rotate of A by C1 bits. */
10491 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10492 is a rotate of A by B bits. */
10494 enum tree_code code0, code1;
10495 tree rtype;
10496 code0 = TREE_CODE (arg0);
10497 code1 = TREE_CODE (arg1);
10498 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10499 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10500 && operand_equal_p (TREE_OPERAND (arg0, 0),
10501 TREE_OPERAND (arg1, 0), 0)
10502 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10503 TYPE_UNSIGNED (rtype))
10504 /* Only create rotates in complete modes. Other cases are not
10505 expanded properly. */
10506 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10508 tree tree01, tree11;
10509 enum tree_code code01, code11;
10511 tree01 = TREE_OPERAND (arg0, 1);
10512 tree11 = TREE_OPERAND (arg1, 1);
10513 STRIP_NOPS (tree01);
10514 STRIP_NOPS (tree11);
10515 code01 = TREE_CODE (tree01);
10516 code11 = TREE_CODE (tree11);
10517 if (code01 == INTEGER_CST
10518 && code11 == INTEGER_CST
10519 && TREE_INT_CST_HIGH (tree01) == 0
10520 && TREE_INT_CST_HIGH (tree11) == 0
10521 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10522 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10524 tem = build2 (LROTATE_EXPR,
10525 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10526 TREE_OPERAND (arg0, 0),
10527 code0 == LSHIFT_EXPR
10528 ? tree01 : tree11);
10529 SET_EXPR_LOCATION (tem, loc);
10530 return fold_convert_loc (loc, type, tem);
10532 else if (code11 == MINUS_EXPR)
10534 tree tree110, tree111;
10535 tree110 = TREE_OPERAND (tree11, 0);
10536 tree111 = TREE_OPERAND (tree11, 1);
10537 STRIP_NOPS (tree110);
10538 STRIP_NOPS (tree111);
10539 if (TREE_CODE (tree110) == INTEGER_CST
10540 && 0 == compare_tree_int (tree110,
10541 TYPE_PRECISION
10542 (TREE_TYPE (TREE_OPERAND
10543 (arg0, 0))))
10544 && operand_equal_p (tree01, tree111, 0))
10545 return
10546 fold_convert_loc (loc, type,
10547 build2 ((code0 == LSHIFT_EXPR
10548 ? LROTATE_EXPR
10549 : RROTATE_EXPR),
10550 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10551 TREE_OPERAND (arg0, 0), tree01));
10553 else if (code01 == MINUS_EXPR)
10555 tree tree010, tree011;
10556 tree010 = TREE_OPERAND (tree01, 0);
10557 tree011 = TREE_OPERAND (tree01, 1);
10558 STRIP_NOPS (tree010);
10559 STRIP_NOPS (tree011);
10560 if (TREE_CODE (tree010) == INTEGER_CST
10561 && 0 == compare_tree_int (tree010,
10562 TYPE_PRECISION
10563 (TREE_TYPE (TREE_OPERAND
10564 (arg0, 0))))
10565 && operand_equal_p (tree11, tree011, 0))
10566 return fold_convert_loc
10567 (loc, type,
10568 build2 ((code0 != LSHIFT_EXPR
10569 ? LROTATE_EXPR
10570 : RROTATE_EXPR),
10571 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10572 TREE_OPERAND (arg0, 0), tree11));
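/* Illustrative example (added for exposition).  On a 32-bit unsigned
   type the recognition above turns

     (x << 3) + (x >> 29)   and   (x << n) + (x >> (32 - n))

   into left rotates of x by 3 and by n, which most targets expand
   to a single rotate instruction.  */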
10577 associate:
10578 /* In most languages, we can't associate operations on floats through
10579 parentheses. Rather than remember where the parentheses were, we
10580 don't associate floats at all, unless the user has specified
10581 -fassociative-math.
10582 And, we need to make sure type is not saturating. */
10584 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10585 && !TYPE_SATURATING (type))
10587 tree var0, con0, lit0, minus_lit0;
10588 tree var1, con1, lit1, minus_lit1;
10589 bool ok = true;
10591 /* Split both trees into variables, constants, and literals. Then
10592 associate each group together, the constants with literals,
10593 then the result with variables. This increases the chances of
10594 literals being recombined later and of generating relocatable
10595 expressions for the sum of a constant and literal. */
10596 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10597 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10598 code == MINUS_EXPR);
10600 /* With undefined overflow we can only associate constants
10601 with one variable. */
10602 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10603 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10604 && var0 && var1)
10606 tree tmp0 = var0;
10607 tree tmp1 = var1;
10609 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10610 tmp0 = TREE_OPERAND (tmp0, 0);
10611 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10612 tmp1 = TREE_OPERAND (tmp1, 0);
10613 /* The only case we can still associate with two variables
10614 is if they are the same, modulo negation. */
10615 if (!operand_equal_p (tmp0, tmp1, 0))
10616 ok = false;
10619 /* Only do something if we found more than two objects. Otherwise,
10620 nothing has changed and we risk infinite recursion. */
10621 if (ok
10622 && (2 < ((var0 != 0) + (var1 != 0)
10623 + (con0 != 0) + (con1 != 0)
10624 + (lit0 != 0) + (lit1 != 0)
10625 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10627 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10628 if (code == MINUS_EXPR)
10629 code = PLUS_EXPR;
10631 var0 = associate_trees (loc, var0, var1, code, type);
10632 con0 = associate_trees (loc, con0, con1, code, type);
10633 lit0 = associate_trees (loc, lit0, lit1, code, type);
10634 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10636 /* Preserve the MINUS_EXPR if the negative part of the literal is
10637 greater than the positive part. Otherwise, the multiplicative
10638 folding code (i.e. extract_muldiv) may be fooled when
10639 unsigned constants are subtracted, as in the following
10640 example: ((X*2 + 4) - 8U)/2. */
10641 if (minus_lit0 && lit0)
10643 if (TREE_CODE (lit0) == INTEGER_CST
10644 && TREE_CODE (minus_lit0) == INTEGER_CST
10645 && tree_int_cst_lt (lit0, minus_lit0))
10647 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10648 MINUS_EXPR, type);
10649 lit0 = 0;
10651 else
10653 lit0 = associate_trees (loc, lit0, minus_lit0,
10654 MINUS_EXPR, type);
10655 minus_lit0 = 0;
10658 if (minus_lit0)
10660 if (con0 == 0)
10661 return
10662 fold_convert_loc (loc, type,
10663 associate_trees (loc, var0, minus_lit0,
10664 MINUS_EXPR, type));
10665 else
10667 con0 = associate_trees (loc, con0, minus_lit0,
10668 MINUS_EXPR, type);
10669 return
10670 fold_convert_loc (loc, type,
10671 associate_trees (loc, var0, con0,
10672 PLUS_EXPR, type));
10676 con0 = associate_trees (loc, con0, lit0, code, type);
10677 return
10678 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10679 code, type));
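/* Illustrative example (added for exposition).  For (x + 5) + (x + 7)
   the split above yields variables x and x and literals 5 and 7;
   reassociation rebuilds this as (x + x) + 12, handing later passes
   a single combined literal.  */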
10683 return NULL_TREE;
10685 case MINUS_EXPR:
10686 /* Pointer simplifications for subtraction, simple reassociations. */
10687 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10689 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10690 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10691 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10693 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10694 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10695 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10696 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10697 return fold_build2_loc (loc, PLUS_EXPR, type,
10698 fold_build2_loc (loc, MINUS_EXPR, type,
10699 arg00, arg10),
10700 fold_build2_loc (loc, MINUS_EXPR, type,
10701 arg01, arg11));
10703 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10704 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10706 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10707 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10708 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10709 fold_convert_loc (loc, type, arg1));
10710 if (tmp)
10711 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
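/* Illustrative example (added for exposition).  For pointers p and q
   into the same array the first rule rewrites

     (p p+ 4) - (q p+ 8)   as   (p - q) + (4 - 8)

   so that when p - q folds to a constant, say 0, the whole
   difference folds to -4.  */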
10714 /* A - (-B) -> A + B */
10715 if (TREE_CODE (arg1) == NEGATE_EXPR)
10716 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10717 fold_convert_loc (loc, type,
10718 TREE_OPERAND (arg1, 0)));
10719 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10720 if (TREE_CODE (arg0) == NEGATE_EXPR
10721 && (FLOAT_TYPE_P (type)
10722 || INTEGRAL_TYPE_P (type))
10723 && negate_expr_p (arg1)
10724 && reorder_operands_p (arg0, arg1))
10725 return fold_build2_loc (loc, MINUS_EXPR, type,
10726 fold_convert_loc (loc, type,
10727 negate_expr (arg1)),
10728 fold_convert_loc (loc, type,
10729 TREE_OPERAND (arg0, 0)));
10730 /* Convert -A - 1 to ~A. */
10731 if (INTEGRAL_TYPE_P (type)
10732 && TREE_CODE (arg0) == NEGATE_EXPR
10733 && integer_onep (arg1)
10734 && !TYPE_OVERFLOW_TRAPS (type))
10735 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10736 fold_convert_loc (loc, type,
10737 TREE_OPERAND (arg0, 0)));
10739 /* Convert -1 - A to ~A. */
10740 if (INTEGRAL_TYPE_P (type)
10741 && integer_all_onesp (arg0))
10742 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10745 /* X - (X / CST) * CST is X % CST. */
10746 if (INTEGRAL_TYPE_P (type)
10747 && TREE_CODE (arg1) == MULT_EXPR
10748 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10749 && operand_equal_p (arg0,
10750 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10751 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10752 TREE_OPERAND (arg1, 1), 0))
10753 return
10754 fold_convert_loc (loc, type,
10755 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10756 arg0, TREE_OPERAND (arg1, 1)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
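                  /* Worked example, for illustration only: with B == 7
                     (a power of 2 minus 1), (A & ~7) - (A & 7) becomes
                     (A ^ 7) - 7; for A == 13 that is 8 - 5 == 3 on the
                     left and 10 - 7 == 3 on the right.  */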
                }
            }
        }
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it gives the wrong result for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert_loc (loc, type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
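      /* Worked example, for illustration only: a - 3 is rewritten as
         a + (-3), so the association code for PLUS_EXPR sees the
         constant.  The REAL_CST guard keeps a - 3.0 in subtraction form,
         presumably so the canonical representation stays stable.  */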

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2_loc (loc, MULT_EXPR, type, diff,
                                      fold_convert_loc (loc, type, esz));
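              /* Worked example, for illustration only: for "int a[10]",
                 &a[7] - &a[2] folds to (7 - 2) * sizeof (int), i.e. the
                 byte difference of the two addresses; DIFF is the index
                 difference and ESZ the element size.  */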
            }
        }

      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure the type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));
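          /* Worked example, for illustration only: (a + a) * 3 becomes
             a * 2 * 3, a single MULT_EXPR chain that constant folding
             collapses to a * 6; omit_one_operand_loc preserves any side
             effects of the dropped copy of A.  */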

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not "
                                    "occur when simplifying "
                                    "multiplication"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      /* Optimize z * conj(z) for integer complex numbers.  */
      if (TREE_CODE (arg0) == CONJ_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return fold_mult_zconjz (loc, type, arg1);
      if (TREE_CODE (arg1) == CONJ_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is applied
             only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
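              /* Worked example, for illustration only: (6.0 / x) * 2.0
                 becomes 12.0 / x, saving a multiplication.  The product
                 C1*C2 is computed by const_binop and may round, which is
                 why this is gated on flag_associative_math.  */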
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }

          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);

          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
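                  /* Worked example, for illustration only: exp (a) * exp (b)
                     becomes exp (a + b), trading one expensive libcall for
                     an addition.  Exact over the reals, but the rounded
                     results can differ, hence the
                     flag_unsafe_math_optimizations guard on this group.  */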
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
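                      /* Worked example, for illustration only:
                         pow (x, 2.0) * pow (x, 3.0) folds to pow (x, 5.0),
                         and pow (x, y) * pow (z, y) to pow (x * z, y);
                         exact over the reals, approximate under rounding.  */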
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (optimize_function_for_speed_p (cfun)
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
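                      /* This pow (x, 2.0) form looks circular, but it is a
                         canonicalization: repeated multiplies and other pow
                         calls can then merge (see the pow*pow folds above),
                         and expansion turns small constant exponents back
                         into multiplies, so no libcall results.  */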
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
          int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
          hi2 &= mhi;
          lo2 &= mlo;
          hi3 = hi1 & ~hi2;
          lo3 = lo1 & ~lo2;
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((lo1 | lo2) & mask) == mask
                  && (lo1 & ~mask) == 0 && hi1 == 0)
                {
                  hi3 = 0;
                  lo3 = mask;
                  break;
                }
            }
          if (hi3 != hi1 || lo3 != lo1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     build_int_cst_wide (type,
                                                                         lo3, hi3)),
                                    arg1);
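          /* Worked example, for illustration only: with C1 == 0x0f and
             C2 == 0x03, C1 & ~C2 == 0x0c, so (X & 0x0f) | 0x03 becomes
             (X & 0x0c) | 0x03; the loop instead keeps C1 as a full
             0xff-style mask when (C1 | C2) covers one, since such masks
             tend to fold better later.  */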
        }

      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
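          /* De Morgan worked through, for illustration only: with 4-bit
             values a == 0b1100 and b == 0b1010, ~a | ~b == 0b0011 | 0b0101
             == 0b0111, and ~(a & b) == ~0b1000 == 0b0111.  */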
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }

      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_int_cst (TREE_TYPE (arg0), 0));
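      /* Worked example, for illustration only: (x & 1) ^ 1 yields 1
         exactly when the low bit of x is 0, so rewriting it as
         (x & 1) == 0 exposes it to the comparison folders and to
         test-and-branch patterns.  */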

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                               type, tmp2, tmp3));
        }

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem), 1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem), 1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }

      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
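          /* Worked example, for illustration only: if ARG0 is the address
             of an object with known 8-byte alignment, MODULUS == 8 and
             RESIDUE == 0, so &var & 7 folds to the constant 0 with no
             runtime masking.  */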
        }

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
         (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For an arithmetic shift, if the sign bit could be set,
                 ZEROBITS can in fact contain sign bits, so no transformation
                 is possible unless MASK masks them all away.  In that case
                 the shift needs to be converted into a logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
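                  /* Worked example, for illustration only: for 32-bit
                     unsigned X, (X >> 24) & 0xff has MASK == 0xff and
                     ZEROBITS == 0xffffff00; NEWMASK == 0xffffffff is the
                     32-bit mode mask, so the AND is rewritten with the
                     full mask, which a later fold drops entirely.  */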
                }
            }
        }

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                      negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2_loc (loc, MULT_EXPR, type,
                                          fold_convert_loc (loc, type, arg0), tem);
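                  /* Worked example, for illustration only: x / 4.0 becomes
                     x * 0.25 whenever we are optimizing, because 0.25 is an
                     exact binary reciprocal; x / 3.0 becomes x * (1.0/3.0)
                     only under -freciprocal-math, since that reciprocal
                     rounds.  */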
                }
            }
        }
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                                 TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg1, 0));
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                                  CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                              build_real (type, dconst1),
                                              tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
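                  /* Worked example, for illustration only: pow (x, 3.0) / x
                     folds to pow (x, 2.0); the new exponent c - 1 is
                     computed at compile time with real_arithmetic.  */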
                }
            }

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                          expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt, build_int_cst (NULL_TREE, pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0), sh_cnt);
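              /* Worked example, for illustration only: with unsigned
                 operands, a / (4 << n) becomes a >> (n + 2), because
                 log2 (4) == 2 and division by a power of two is a right
                 shift for non-negative values.  */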
            }
        }

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, but it's not clear whether
         they still do after the last round of changes to the DIV code
         in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N)  where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                           build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
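              /* Worked example, for illustration only: for unsigned x,
                 x % 8 becomes x & 7, and x % (2 << n) becomes
                 x & ((2 << n) - 1).  Both require a non-negative X, which
                 tree_expr_nonnegative_warnv_p establishes for signed
                 operands.  */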
            }
        }

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
                                             TREE_OPERAND (arg0, 0));
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                  build_int_cst (type, low));
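          /* Worked example, for illustration only: (a >> 3) >> 5 becomes
             a >> 8.  If the counts overflow the precision, rotates wrap
             modulo the precision, logical shifts fold to 0, and an
             arithmetic right shift is clamped to precision - 1.  */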
12309 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12310 into x & ((unsigned)-1 >> c) for unsigned types. */
12311 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12312 || (TYPE_UNSIGNED (type)
12313 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12314 && host_integerp (arg1, false)
12315 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12316 && host_integerp (TREE_OPERAND (arg0, 1), false)
12317 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12319 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12320 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12321 tree lshift;
12322 tree arg00;
12324 if (low0 == low1)
12326 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12328 lshift = build_int_cst (type, -1);
12329 lshift = int_const_binop (code, lshift, arg1, 0);
12331 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12335 /* Rewrite an LROTATE_EXPR by a constant into an
12336 RROTATE_EXPR by a new constant. */
12337 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12339 tree tem = build_int_cst (TREE_TYPE (arg1),
12340 TYPE_PRECISION (type));
12341 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12342 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12345 /* If we have a rotate of a bit operation with the rotate count and
12346 the second operand of the bit operation both constant,
12347 permute the two operations. */
12348 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12349 && (TREE_CODE (arg0) == BIT_AND_EXPR
12350 || TREE_CODE (arg0) == BIT_IOR_EXPR
12351 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12352 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12353 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12354 fold_build2_loc (loc, code, type,
12355 TREE_OPERAND (arg0, 0), arg1),
12356 fold_build2_loc (loc, code, type,
12357 TREE_OPERAND (arg0, 1), arg1));
12359 /* Two consecutive rotates adding up to the precision of the
12360 type can be ignored. */
12361 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12362 && TREE_CODE (arg0) == RROTATE_EXPR
12363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12364 && TREE_INT_CST_HIGH (arg1) == 0
12365 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12366 && ((TREE_INT_CST_LOW (arg1)
12367 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12368 == (unsigned int) TYPE_PRECISION (type)))
12369 return TREE_OPERAND (arg0, 0);
12371 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12372 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12373 if the latter can be further optimized. */
12374 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12375 && TREE_CODE (arg0) == BIT_AND_EXPR
12376 && TREE_CODE (arg1) == INTEGER_CST
12377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12379 tree mask = fold_build2_loc (loc, code, type,
12380 fold_convert_loc (loc, type,
12381 TREE_OPERAND (arg0, 1)),
12382 arg1);
12383 tree shift = fold_build2_loc (loc, code, type,
12384 fold_convert_loc (loc, type,
12385 TREE_OPERAND (arg0, 0)),
12386 arg1);
12387 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12388 if (tem)
12389 return tem;
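/* Worked example, assuming a 32-bit type, where the rebuilt
   BIT_AND_EXPR simplifies through the shift rules above:

     ((x >> 8) & 0xff) << 8  ==>  ((x >> 8) << 8) & 0xff00
                             ==>  (x & 0xffffff00) & 0xff00
                             ==>  x & 0xff00  */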
12392 return NULL_TREE;
12394 case MIN_EXPR:
12395 if (operand_equal_p (arg0, arg1, 0))
12396 return omit_one_operand_loc (loc, type, arg0, arg1);
12397 if (INTEGRAL_TYPE_P (type)
12398 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12399 return omit_one_operand_loc (loc, type, arg1, arg0);
12400 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12401 if (tem)
12402 return tem;
12403 goto associate;
12405 case MAX_EXPR:
12406 if (operand_equal_p (arg0, arg1, 0))
12407 return omit_one_operand_loc (loc, type, arg0, arg1);
12408 if (INTEGRAL_TYPE_P (type)
12409 && TYPE_MAX_VALUE (type)
12410 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12411 return omit_one_operand_loc (loc, type, arg1, arg0);
12412 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12413 if (tem)
12414 return tem;
12415 goto associate;
12417 case TRUTH_ANDIF_EXPR:
12418 /* Note that the operands of this must be ints
12419 and their values must be 0 or 1.
12420 ("true" is a fixed value perhaps depending on the language.) */
12421 /* If first arg is constant zero, return it. */
12422 if (integer_zerop (arg0))
12423 return fold_convert_loc (loc, type, arg0);
12424 case TRUTH_AND_EXPR:
12425 /* If either arg is constant true, drop it. */
12426 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12427 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12428 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12429 /* Preserve sequence points. */
12430 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12431 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12432 /* If second arg is constant zero, result is zero, but first arg
12433 must be evaluated. */
12434 if (integer_zerop (arg1))
12435 return omit_one_operand_loc (loc, type, arg1, arg0);
12436 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12437 case will be handled here. */
12438 if (integer_zerop (arg0))
12439 return omit_one_operand_loc (loc, type, arg0, arg1);
12441 /* !X && X is always false. */
12442 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12443 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12444 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12445 /* X && !X is always false. */
12446 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12447 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12448 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12450 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12451 means A >= Y && A != MAX, but in this case we know that
12452 A < X <= MAX. */
12454 if (!TREE_SIDE_EFFECTS (arg0)
12455 && !TREE_SIDE_EFFECTS (arg1))
12457 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12458 if (tem && !operand_equal_p (tem, arg0, 0))
12459 return fold_build2_loc (loc, code, type, tem, arg1);
12461 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12462 if (tem && !operand_equal_p (tem, arg1, 0))
12463 return fold_build2_loc (loc, code, type, arg0, tem);
12466 truth_andor:
12467 /* We only do these simplifications if we are optimizing. */
12468 if (!optimize)
12469 return NULL_TREE;
12471 /* Check for things like (A || B) && (A || C). We can convert this
12472 to A || (B && C). Note that either operator can be any of the four
12473 truth and/or operations and the transformation will still be
12474 valid. Also note that we only care about order for the
12475 ANDIF and ORIF operators. If B contains side effects, this
12476 might change the truth-value of A. */
12477 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12478 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12479 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12480 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12481 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12482 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12484 tree a00 = TREE_OPERAND (arg0, 0);
12485 tree a01 = TREE_OPERAND (arg0, 1);
12486 tree a10 = TREE_OPERAND (arg1, 0);
12487 tree a11 = TREE_OPERAND (arg1, 1);
12488 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12489 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12490 && (code == TRUTH_AND_EXPR
12491 || code == TRUTH_OR_EXPR));
12493 if (operand_equal_p (a00, a10, 0))
12494 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12495 fold_build2_loc (loc, code, type, a01, a11));
12496 else if (commutative && operand_equal_p (a00, a11, 0))
12497 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12498 fold_build2_loc (loc, code, type, a01, a10));
12499 else if (commutative && operand_equal_p (a01, a10, 0))
12500 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12501 fold_build2_loc (loc, code, type, a00, a11));
12503 /* This case is tricky because we must either have commutative
12504 operators or else A10 must not have side-effects. */
12506 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12507 && operand_equal_p (a01, a11, 0))
12508 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12509 fold_build2_loc (loc, code, type, a00, a10),
12510 a01);
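/* E.g., (a || b) && (a || c) ==> a || (b && c).  For the short-circuit
   operators the shared operand must appear first on both sides; only
   the non-short-circuit TRUTH_AND/TRUTH_OR forms may be matched
   commutatively.  */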
12513 /* See if we can build a range comparison. */
12514 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12515 return tem;
12517 /* Check for the possibility of merging component references. If our
12518 lhs is another similar operation, try to merge its rhs with our
12519 rhs. Then try to merge our lhs and rhs. */
12520 if (TREE_CODE (arg0) == code
12521 && 0 != (tem = fold_truthop (loc, code, type,
12522 TREE_OPERAND (arg0, 1), arg1)))
12523 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12525 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12526 return tem;
12528 return NULL_TREE;
12530 case TRUTH_ORIF_EXPR:
12531 /* Note that the operands of this must be ints
12532 and their values must be 0 or 1.
12533 ("true" is a fixed value perhaps depending on the language.) */
12534 /* If first arg is constant true, return it. */
12535 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12536 return fold_convert_loc (loc, type, arg0);
12537 case TRUTH_OR_EXPR:
12538 /* If either arg is constant zero, drop it. */
12539 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12540 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12541 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12542 /* Preserve sequence points. */
12543 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12544 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12545 /* If second arg is constant true, result is true, but we must
12546 evaluate first arg. */
12547 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12548 return omit_one_operand_loc (loc, type, arg1, arg0);
12549 /* Likewise for first arg, but note this only occurs here for
12550 TRUTH_OR_EXPR. */
12551 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12552 return omit_one_operand_loc (loc, type, arg0, arg1);
12554 /* !X || X is always true. */
12555 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12556 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12557 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12558 /* X || !X is always true. */
12559 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12560 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12561 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12563 goto truth_andor;
12565 case TRUTH_XOR_EXPR:
12566 /* If the second arg is constant zero, drop it. */
12567 if (integer_zerop (arg1))
12568 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12569 /* If the second arg is constant true, this is a logical inversion. */
12570 if (integer_onep (arg1))
12572 /* Only call invert_truthvalue if operand is a truth value. */
12573 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12574 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12575 else
12576 tem = invert_truthvalue_loc (loc, arg0);
12577 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12579 /* Identical arguments cancel to zero. */
12580 if (operand_equal_p (arg0, arg1, 0))
12581 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12583 /* !X ^ X is always true. */
12584 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12585 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12586 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12588 /* X ^ !X is always true. */
12589 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12590 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12591 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12593 return NULL_TREE;
12595 case EQ_EXPR:
12596 case NE_EXPR:
12597 tem = fold_comparison (loc, code, type, op0, op1);
12598 if (tem != NULL_TREE)
12599 return tem;
12601 /* bool_var != 0 becomes bool_var. */
12602 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12603 && code == NE_EXPR)
12604 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12606 /* bool_var == 1 becomes bool_var. */
12607 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12608 && code == EQ_EXPR)
12609 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12611 /* bool_var != 1 becomes !bool_var. */
12612 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12613 && code == NE_EXPR)
12614 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12615 fold_convert_loc (loc, type, arg0));
12617 /* bool_var == 0 becomes !bool_var. */
12618 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12619 && code == EQ_EXPR)
12620 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12621 fold_convert_loc (loc, type, arg0));
12623 /* !exp != 0 becomes !exp. */
12624 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12625 && code == NE_EXPR)
12626 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12628 /* If this is an equality comparison of the address of two non-weak,
12629 unaliased symbols neither of which are extern (since we do not
12630 have access to attributes for externs), then we know the result. */
12631 if (TREE_CODE (arg0) == ADDR_EXPR
12632 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12633 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12634 && ! lookup_attribute ("alias",
12635 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12636 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12637 && TREE_CODE (arg1) == ADDR_EXPR
12638 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12639 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12640 && ! lookup_attribute ("alias",
12641 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12642 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12644 /* We know that we're looking at the address of two
12645 non-weak, unaliased, static _DECL nodes.
12647 It is both wasteful and incorrect to call operand_equal_p
12648 to compare the two ADDR_EXPR nodes. It is wasteful in that
12649 all we need to do is test pointer equality for the arguments
12650 to the two ADDR_EXPR nodes. It is incorrect to use
12651 operand_equal_p as that function is NOT equivalent to a
12652 C equality test. It can in fact return false for two
12653 objects which would test as equal using the C equality
12654 operator. */
12655 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12656 return constant_boolean_node (equal
12657 ? code == EQ_EXPR : code != EQ_EXPR,
12658 type);
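/* E.g., given  static int a, b;  the test  &a == &b  folds to 0 and
   &a != &b  folds to 1, by simple pointer identity of the two decls.  */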
12661 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12662 a MINUS_EXPR of a constant, we can convert it into a comparison with
12663 a revised constant as long as no overflow occurs. */
12664 if (TREE_CODE (arg1) == INTEGER_CST
12665 && (TREE_CODE (arg0) == PLUS_EXPR
12666 || TREE_CODE (arg0) == MINUS_EXPR)
12667 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12668 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12669 ? MINUS_EXPR : PLUS_EXPR,
12670 fold_convert_loc (loc, TREE_TYPE (arg0),
12671 arg1),
12672 TREE_OPERAND (arg0, 1), 0))
12673 && !TREE_OVERFLOW (tem))
12674 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12676 /* Similarly for a NEGATE_EXPR. */
12677 if (TREE_CODE (arg0) == NEGATE_EXPR
12678 && TREE_CODE (arg1) == INTEGER_CST
12679 && 0 != (tem = negate_expr (arg1))
12680 && TREE_CODE (tem) == INTEGER_CST
12681 && !TREE_OVERFLOW (tem))
12682 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12684 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12685 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12686 && TREE_CODE (arg1) == INTEGER_CST
12687 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12688 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12689 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12690 fold_convert_loc (loc,
12691 TREE_TYPE (arg0),
12692 arg1),
12693 TREE_OPERAND (arg0, 1)));
12695 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12696 if ((TREE_CODE (arg0) == PLUS_EXPR
12697 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12698 || TREE_CODE (arg0) == MINUS_EXPR)
12699 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12700 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12701 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12703 tree val = TREE_OPERAND (arg0, 1);
12704 return omit_two_operands_loc (loc, type,
12705 fold_build2_loc (loc, code, type,
12706 val,
12707 build_int_cst (TREE_TYPE (val),
12708 0)),
12709 TREE_OPERAND (arg0, 0), arg1);
12712 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12713 if (TREE_CODE (arg0) == MINUS_EXPR
12714 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12715 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12716 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12718 return omit_two_operands_loc (loc, type,
12719 code == NE_EXPR
12720 ? boolean_true_node : boolean_false_node,
12721 TREE_OPERAND (arg0, 1), arg1);
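/* E.g., (7 - x) == x folds to false: it would require 2*x == 7, which
   is impossible in modulo-2^N arithmetic when the constant is odd.  */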
12724 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12725 for !=. Don't do this for ordered comparisons due to overflow. */
12726 if (TREE_CODE (arg0) == MINUS_EXPR
12727 && integer_zerop (arg1))
12728 return fold_build2_loc (loc, code, type,
12729 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12731 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12732 if (TREE_CODE (arg0) == ABS_EXPR
12733 && (integer_zerop (arg1) || real_zerop (arg1)))
12734 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12736 /* If this is an EQ or NE comparison with zero and ARG0 is
12737 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12738 two operations, but the latter can be done in one less insn
12739 on machines that have only two-operand insns or on which a
12740 constant cannot be the first operand. */
12741 if (TREE_CODE (arg0) == BIT_AND_EXPR
12742 && integer_zerop (arg1))
12744 tree arg00 = TREE_OPERAND (arg0, 0);
12745 tree arg01 = TREE_OPERAND (arg0, 1);
12746 if (TREE_CODE (arg00) == LSHIFT_EXPR
12747 && integer_onep (TREE_OPERAND (arg00, 0)))
12749 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12750 arg01, TREE_OPERAND (arg00, 1));
12751 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12752 build_int_cst (TREE_TYPE (arg0), 1));
12753 return fold_build2_loc (loc, code, type,
12754 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12755 arg1);
12757 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12758 && integer_onep (TREE_OPERAND (arg01, 0)))
12760 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12761 arg00, TREE_OPERAND (arg01, 1));
12762 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12763 build_int_cst (TREE_TYPE (arg0), 1));
12764 return fold_build2_loc (loc, code, type,
12765 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12766 arg1);
12770 /* If this is an NE or EQ comparison of zero against the result of a
12771 signed MOD operation whose second operand is a power of 2, make
12772 the MOD operation unsigned since it is simpler and equivalent. */
12773 if (integer_zerop (arg1)
12774 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12775 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12776 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12777 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12778 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12779 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12781 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12782 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12783 fold_convert_loc (loc, newtype,
12784 TREE_OPERAND (arg0, 0)),
12785 fold_convert_loc (loc, newtype,
12786 TREE_OPERAND (arg0, 1)));
12788 return fold_build2_loc (loc, code, type, newmod,
12789 fold_convert_loc (loc, newtype, arg1));
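/* E.g., for signed x, x % 8 == 0 becomes (unsigned) x % 8 == 0: both
   sides are zero exactly when x is a multiple of 8, and the unsigned
   form is easier to reduce to a mask test later.  */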
12792 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12793 C1 is a valid shift constant, and C2 is a power of two, i.e.
12794 a single bit. */
12795 if (TREE_CODE (arg0) == BIT_AND_EXPR
12796 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12797 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12798 == INTEGER_CST
12799 && integer_pow2p (TREE_OPERAND (arg0, 1))
12800 && integer_zerop (arg1))
12802 tree itype = TREE_TYPE (arg0);
12803 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12804 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12806 /* Check for a valid shift count. */
12807 if (TREE_INT_CST_HIGH (arg001) == 0
12808 && TREE_INT_CST_LOW (arg001) < prec)
12810 tree arg01 = TREE_OPERAND (arg0, 1);
12811 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12812 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12813 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12814 can be rewritten as (X & (C2 << C1)) != 0. */
12815 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12817 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12818 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12819 return fold_build2_loc (loc, code, type, tem, arg1);
12821 /* Otherwise, for signed (arithmetic) shifts,
12822 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12823 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12824 else if (!TYPE_UNSIGNED (itype))
12825 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12826 arg000, build_int_cst (itype, 0));
12827 /* Otherwise, for unsigned (logical) shifts,
12828 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12829 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12830 else
12831 return omit_one_operand_loc (loc, type,
12832 code == EQ_EXPR ? integer_one_node
12833 : integer_zero_node,
12834 arg000);
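/* Worked example, assuming a 32-bit type:

     ((x >> 2) & 4) != 0   ==>  (x & 16) != 0  (4 << 2 does not overflow)
     ((x >> 31) & 2) != 0  ==>  x < 0          (signed x; the shifted-in
                                                bits replicate the sign)
     ((u >> 31) & 2) != 0  ==>  false          (unsigned u; u is still
                                                evaluated for side effects)  */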
12838 /* If this is an NE comparison of zero with an AND of one, remove the
12839 comparison since the AND will give the correct value. */
12840 if (code == NE_EXPR
12841 && integer_zerop (arg1)
12842 && TREE_CODE (arg0) == BIT_AND_EXPR
12843 && integer_onep (TREE_OPERAND (arg0, 1)))
12844 return fold_convert_loc (loc, type, arg0);
12846 /* If we have (A & C) == C where C is a power of 2, convert this into
12847 (A & C) != 0. Similarly for NE_EXPR. */
12848 if (TREE_CODE (arg0) == BIT_AND_EXPR
12849 && integer_pow2p (TREE_OPERAND (arg0, 1))
12850 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12851 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12852 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12853 integer_zero_node));
12855 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12856 bit, then fold the expression into A < 0 or A >= 0. */
12857 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12858 if (tem)
12859 return tem;
12861 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12862 Similarly for NE_EXPR. */
12863 if (TREE_CODE (arg0) == BIT_AND_EXPR
12864 && TREE_CODE (arg1) == INTEGER_CST
12865 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12867 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12868 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12869 TREE_OPERAND (arg0, 1));
12870 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12871 arg1, notc);
12872 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12873 if (integer_nonzerop (dandnotc))
12874 return omit_one_operand_loc (loc, type, rslt, arg0);
12877 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12878 Similarly for NE_EXPR. */
12879 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12880 && TREE_CODE (arg1) == INTEGER_CST
12881 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12883 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12884 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12885 TREE_OPERAND (arg0, 1), notd);
12886 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12887 if (integer_nonzerop (candnotd))
12888 return omit_one_operand_loc (loc, type, rslt, arg0);
12891 /* If this is a comparison of a field, we may be able to simplify it. */
12892 if ((TREE_CODE (arg0) == COMPONENT_REF
12893 || TREE_CODE (arg0) == BIT_FIELD_REF)
12894 /* Handle the constant case even without -O
12895 to make sure the warnings are given. */
12896 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12898 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12899 if (t1)
12900 return t1;
12903 /* Optimize comparisons of strlen vs zero to a compare of the
12904 first character of the string vs zero. To wit,
12905 strlen(ptr) == 0 => *ptr == 0
12906 strlen(ptr) != 0 => *ptr != 0
12907 Other cases should reduce to one of these two (or a constant)
12908 due to the return value of strlen being unsigned. */
12909 if (TREE_CODE (arg0) == CALL_EXPR
12910 && integer_zerop (arg1))
12912 tree fndecl = get_callee_fndecl (arg0);
12914 if (fndecl
12915 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12916 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12917 && call_expr_nargs (arg0) == 1
12918 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12920 tree iref = build_fold_indirect_ref_loc (loc,
12921 CALL_EXPR_ARG (arg0, 0));
12922 return fold_build2_loc (loc, code, type, iref,
12923 build_int_cst (TREE_TYPE (iref), 0));
12927 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12928 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12929 if (TREE_CODE (arg0) == RSHIFT_EXPR
12930 && integer_zerop (arg1)
12931 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12933 tree arg00 = TREE_OPERAND (arg0, 0);
12934 tree arg01 = TREE_OPERAND (arg0, 1);
12935 tree itype = TREE_TYPE (arg00);
12936 if (TREE_INT_CST_HIGH (arg01) == 0
12937 && TREE_INT_CST_LOW (arg01)
12938 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12940 if (TYPE_UNSIGNED (itype))
12942 itype = signed_type_for (itype);
12943 arg00 = fold_convert_loc (loc, itype, arg00);
12945 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12946 type, arg00, build_int_cst (itype, 0));
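/* E.g., for a 32-bit x, (x >> 31) != 0 ==> x < 0 and
   (x >> 31) == 0 ==> x >= 0, converting an unsigned x to the
   corresponding signed type first.  */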
12950 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12951 if (integer_zerop (arg1)
12952 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12953 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12954 TREE_OPERAND (arg0, 1));
12956 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12957 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12958 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12959 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12960 build_int_cst (TREE_TYPE (arg1), 0));
12961 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12962 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12963 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12964 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12965 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12966 build_int_cst (TREE_TYPE (arg1), 0));
12968 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12969 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12970 && TREE_CODE (arg1) == INTEGER_CST
12971 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12972 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12973 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12974 TREE_OPERAND (arg0, 1), arg1));
12976 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12977 (X & C) == 0 when C is a single bit. */
12978 if (TREE_CODE (arg0) == BIT_AND_EXPR
12979 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12980 && integer_zerop (arg1)
12981 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12983 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12984 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12985 TREE_OPERAND (arg0, 1));
12986 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12987 type, tem, arg1);
12990 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12991 constant C is a power of two, i.e. a single bit. */
12992 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12993 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12994 && integer_zerop (arg1)
12995 && integer_pow2p (TREE_OPERAND (arg0, 1))
12996 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12997 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12999 tree arg00 = TREE_OPERAND (arg0, 0);
13000 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13001 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13004 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13005 when C is a power of two, i.e. a single bit. */
13006 if (TREE_CODE (arg0) == BIT_AND_EXPR
13007 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13008 && integer_zerop (arg1)
13009 && integer_pow2p (TREE_OPERAND (arg0, 1))
13010 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13011 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13013 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13014 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13015 arg000, TREE_OPERAND (arg0, 1));
13016 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13017 tem, build_int_cst (TREE_TYPE (tem), 0));
13020 if (integer_zerop (arg1)
13021 && tree_expr_nonzero_p (arg0))
13023 tree res = constant_boolean_node (code == NE_EXPR, type);
13024 return omit_one_operand_loc (loc, type, res, arg0);
13027 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13028 if (TREE_CODE (arg0) == NEGATE_EXPR
13029 && TREE_CODE (arg1) == NEGATE_EXPR)
13030 return fold_build2_loc (loc, code, type,
13031 TREE_OPERAND (arg0, 0),
13032 TREE_OPERAND (arg1, 0));
13034 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13035 if (TREE_CODE (arg0) == BIT_AND_EXPR
13036 && TREE_CODE (arg1) == BIT_AND_EXPR)
13038 tree arg00 = TREE_OPERAND (arg0, 0);
13039 tree arg01 = TREE_OPERAND (arg0, 1);
13040 tree arg10 = TREE_OPERAND (arg1, 0);
13041 tree arg11 = TREE_OPERAND (arg1, 1);
13042 tree itype = TREE_TYPE (arg0);
13044 if (operand_equal_p (arg01, arg11, 0))
13045 return fold_build2_loc (loc, code, type,
13046 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13047 fold_build2_loc (loc,
13048 BIT_XOR_EXPR, itype,
13049 arg00, arg10),
13050 arg01),
13051 build_int_cst (itype, 0));
13053 if (operand_equal_p (arg01, arg10, 0))
13054 return fold_build2_loc (loc, code, type,
13055 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13056 fold_build2_loc (loc,
13057 BIT_XOR_EXPR, itype,
13058 arg00, arg11),
13059 arg01),
13060 build_int_cst (itype, 0));
13062 if (operand_equal_p (arg00, arg11, 0))
13063 return fold_build2_loc (loc, code, type,
13064 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13065 fold_build2_loc (loc,
13066 BIT_XOR_EXPR, itype,
13067 arg01, arg10),
13068 arg00),
13069 build_int_cst (itype, 0));
13071 if (operand_equal_p (arg00, arg10, 0))
13072 return fold_build2_loc (loc, code, type,
13073 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13074 fold_build2_loc (loc,
13075 BIT_XOR_EXPR, itype,
13076 arg01, arg11),
13077 arg00),
13078 build_int_cst (itype, 0));
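/* E.g., (x & 7) == (y & 7) ==> ((x ^ y) & 7) == 0: the masked bits
   agree exactly when their XOR has no bits set inside the mask.  */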
13081 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13082 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13084 tree arg00 = TREE_OPERAND (arg0, 0);
13085 tree arg01 = TREE_OPERAND (arg0, 1);
13086 tree arg10 = TREE_OPERAND (arg1, 0);
13087 tree arg11 = TREE_OPERAND (arg1, 1);
13088 tree itype = TREE_TYPE (arg0);
13090 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13091 operand_equal_p guarantees no side-effects so we don't need
13092 to use omit_one_operand on Z. */
13093 if (operand_equal_p (arg01, arg11, 0))
13094 return fold_build2_loc (loc, code, type, arg00, arg10);
13095 if (operand_equal_p (arg01, arg10, 0))
13096 return fold_build2_loc (loc, code, type, arg00, arg11);
13097 if (operand_equal_p (arg00, arg11, 0))
13098 return fold_build2_loc (loc, code, type, arg01, arg10);
13099 if (operand_equal_p (arg00, arg10, 0))
13100 return fold_build2_loc (loc, code, type, arg01, arg11);
13102 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13103 if (TREE_CODE (arg01) == INTEGER_CST
13104 && TREE_CODE (arg11) == INTEGER_CST)
13105 return fold_build2_loc (loc, code, type,
13106 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13107 fold_build2_loc (loc,
13108 BIT_XOR_EXPR, itype,
13109 arg01, arg11)),
13110 arg10);
13113 /* Attempt to simplify equality/inequality comparisons of complex
13114 values. Only lower the comparison if the result is known or
13115 can be simplified to a single scalar comparison. */
13116 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13117 || TREE_CODE (arg0) == COMPLEX_CST)
13118 && (TREE_CODE (arg1) == COMPLEX_EXPR
13119 || TREE_CODE (arg1) == COMPLEX_CST))
13121 tree real0, imag0, real1, imag1;
13122 tree rcond, icond;
13124 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13126 real0 = TREE_OPERAND (arg0, 0);
13127 imag0 = TREE_OPERAND (arg0, 1);
13129 else
13131 real0 = TREE_REALPART (arg0);
13132 imag0 = TREE_IMAGPART (arg0);
13135 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13137 real1 = TREE_OPERAND (arg1, 0);
13138 imag1 = TREE_OPERAND (arg1, 1);
13140 else
13142 real1 = TREE_REALPART (arg1);
13143 imag1 = TREE_IMAGPART (arg1);
13146 rcond = fold_binary_loc (loc, code, type, real0, real1);
13147 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13149 if (integer_zerop (rcond))
13151 if (code == EQ_EXPR)
13152 return omit_two_operands_loc (loc, type, boolean_false_node,
13153 imag0, imag1);
13154 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13156 else
13158 if (code == NE_EXPR)
13159 return omit_two_operands_loc (loc, type, boolean_true_node,
13160 imag0, imag1);
13161 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13165 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13166 if (icond && TREE_CODE (icond) == INTEGER_CST)
13168 if (integer_zerop (icond))
13170 if (code == EQ_EXPR)
13171 return omit_two_operands_loc (loc, type, boolean_false_node,
13172 real0, real1);
13173 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13175 else
13177 if (code == NE_EXPR)
13178 return omit_two_operands_loc (loc, type, boolean_true_node,
13179 real0, real1);
13180 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13185 return NULL_TREE;
13187 case LT_EXPR:
13188 case GT_EXPR:
13189 case LE_EXPR:
13190 case GE_EXPR:
13191 tem = fold_comparison (loc, code, type, op0, op1);
13192 if (tem != NULL_TREE)
13193 return tem;
13195 /* Transform comparisons of the form X +- C CMP X. */
13196 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13197 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13198 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13199 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13200 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13201 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13203 tree arg01 = TREE_OPERAND (arg0, 1);
13204 enum tree_code code0 = TREE_CODE (arg0);
13205 int is_positive;
13207 if (TREE_CODE (arg01) == REAL_CST)
13208 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13209 else
13210 is_positive = tree_int_cst_sgn (arg01);
13212 /* (X - c) > X becomes false. */
13213 if (code == GT_EXPR
13214 && ((code0 == MINUS_EXPR && is_positive >= 0)
13215 || (code0 == PLUS_EXPR && is_positive <= 0)))
13217 if (TREE_CODE (arg01) == INTEGER_CST
13218 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13219 fold_overflow_warning (("assuming signed overflow does not "
13220 "occur when assuming that (X - c) > X "
13221 "is always false"),
13222 WARN_STRICT_OVERFLOW_ALL);
13223 return constant_boolean_node (0, type);
13226 /* Likewise (X + c) < X becomes false. */
13227 if (code == LT_EXPR
13228 && ((code0 == PLUS_EXPR && is_positive >= 0)
13229 || (code0 == MINUS_EXPR && is_positive <= 0)))
13231 if (TREE_CODE (arg01) == INTEGER_CST
13232 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13233 fold_overflow_warning (("assuming signed overflow does not "
13234 "occur when assuming that "
13235 "(X + c) < X is always false"),
13236 WARN_STRICT_OVERFLOW_ALL);
13237 return constant_boolean_node (0, type);
13240 /* Convert (X - c) <= X to true. */
13241 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13242 && code == LE_EXPR
13243 && ((code0 == MINUS_EXPR && is_positive >= 0)
13244 || (code0 == PLUS_EXPR && is_positive <= 0)))
13246 if (TREE_CODE (arg01) == INTEGER_CST
13247 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13248 fold_overflow_warning (("assuming signed overflow does not "
13249 "occur when assuming that "
13250 "(X - c) <= X is always true"),
13251 WARN_STRICT_OVERFLOW_ALL);
13252 return constant_boolean_node (1, type);
13255 /* Convert (X + c) >= X to true. */
13256 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13257 && code == GE_EXPR
13258 && ((code0 == PLUS_EXPR && is_positive >= 0)
13259 || (code0 == MINUS_EXPR && is_positive <= 0)))
13261 if (TREE_CODE (arg01) == INTEGER_CST
13262 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13263 fold_overflow_warning (("assuming signed overflow does not "
13264 "occur when assuming that "
13265 "(X + c) >= X is always true"),
13266 WARN_STRICT_OVERFLOW_ALL);
13267 return constant_boolean_node (1, type);
13270 if (TREE_CODE (arg01) == INTEGER_CST)
13272 /* Convert X + c > X and X - c < X to true for integers. */
13273 if (code == GT_EXPR
13274 && ((code0 == PLUS_EXPR && is_positive > 0)
13275 || (code0 == MINUS_EXPR && is_positive < 0)))
13277 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13278 fold_overflow_warning (("assuming signed overflow does "
13279 "not occur when assuming that "
13280 "(X + c) > X is always true"),
13281 WARN_STRICT_OVERFLOW_ALL);
13282 return constant_boolean_node (1, type);
13285 if (code == LT_EXPR
13286 && ((code0 == MINUS_EXPR && is_positive > 0)
13287 || (code0 == PLUS_EXPR && is_positive < 0)))
13289 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13290 fold_overflow_warning (("assuming signed overflow does "
13291 "not occur when assuming that "
13292 "(X - c) < X is always true"),
13293 WARN_STRICT_OVERFLOW_ALL);
13294 return constant_boolean_node (1, type);
13297 /* Convert X + c <= X and X - c >= X to false for integers. */
13298 if (code == LE_EXPR
13299 && ((code0 == PLUS_EXPR && is_positive > 0)
13300 || (code0 == MINUS_EXPR && is_positive < 0)))
13302 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13303 fold_overflow_warning (("assuming signed overflow does "
13304 "not occur when assuming that "
13305 "(X + c) <= X is always false"),
13306 WARN_STRICT_OVERFLOW_ALL);
13307 return constant_boolean_node (0, type);
13310 if (code == GE_EXPR
13311 && ((code0 == MINUS_EXPR && is_positive > 0)
13312 || (code0 == PLUS_EXPR && is_positive < 0)))
13314 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13315 fold_overflow_warning (("assuming signed overflow does "
13316 "not occur when assuming that "
13317 "(X - c) >= X is always false"),
13318 WARN_STRICT_OVERFLOW_ALL);
13319 return constant_boolean_node (0, type);
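/* E.g., when signed overflow is undefined, x + 1 > x folds to true
   and x + 1 <= x folds to false; the fold_overflow_warning calls
   above let -Wstrict-overflow diagnose the assumption.  */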
13324 /* Comparisons with the highest or lowest possible integer of
13325 the specified precision will have known values. */
13327 tree arg1_type = TREE_TYPE (arg1);
13328 unsigned int width = TYPE_PRECISION (arg1_type);
13330 if (TREE_CODE (arg1) == INTEGER_CST
13331 && width <= 2 * HOST_BITS_PER_WIDE_INT
13332 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13334 HOST_WIDE_INT signed_max_hi;
13335 unsigned HOST_WIDE_INT signed_max_lo;
13336 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13338 if (width <= HOST_BITS_PER_WIDE_INT)
13340 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13341 - 1;
13342 signed_max_hi = 0;
13343 max_hi = 0;
13345 if (TYPE_UNSIGNED (arg1_type))
13347 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13348 min_lo = 0;
13349 min_hi = 0;
13351 else
13353 max_lo = signed_max_lo;
13354 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13355 min_hi = -1;
13358 else
13360 width -= HOST_BITS_PER_WIDE_INT;
13361 signed_max_lo = -1;
13362 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13363 - 1;
13364 max_lo = -1;
13365 min_lo = 0;
13367 if (TYPE_UNSIGNED (arg1_type))
13369 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13370 min_hi = 0;
13372 else
13374 max_hi = signed_max_hi;
13375 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13379 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13380 && TREE_INT_CST_LOW (arg1) == max_lo)
13381 switch (code)
13383 case GT_EXPR:
13384 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13386 case GE_EXPR:
13387 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13389 case LE_EXPR:
13390 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13392 case LT_EXPR:
13393 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13395 /* The GE_EXPR and LT_EXPR cases above are not normally
13396 reached because of previous transformations. */
13398 default:
13399 break;
13401 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13402 == max_hi
13403 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13404 switch (code)
13406 case GT_EXPR:
13407 arg1 = const_binop (PLUS_EXPR, arg1,
13408 build_int_cst (TREE_TYPE (arg1), 1), 0);
13409 return fold_build2_loc (loc, EQ_EXPR, type,
13410 fold_convert_loc (loc,
13411 TREE_TYPE (arg1), arg0),
13412 arg1);
13413 case LE_EXPR:
13414 arg1 = const_binop (PLUS_EXPR, arg1,
13415 build_int_cst (TREE_TYPE (arg1), 1), 0);
13416 return fold_build2_loc (loc, NE_EXPR, type,
13417 fold_convert_loc (loc, TREE_TYPE (arg1),
13418 arg0),
13419 arg1);
13420 default:
13421 break;
13423 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13424 == min_hi
13425 && TREE_INT_CST_LOW (arg1) == min_lo)
13426 switch (code)
13428 case LT_EXPR:
13429 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13431 case LE_EXPR:
13432 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13434 case GE_EXPR:
13435 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13437 case GT_EXPR:
13438 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13440 default:
13441 break;
13443 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13444 == min_hi
13445 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13446 switch (code)
13448 case GE_EXPR:
13449 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13450 return fold_build2_loc (loc, NE_EXPR, type,
13451 fold_convert_loc (loc,
13452 TREE_TYPE (arg1), arg0),
13453 arg1);
13454 case LT_EXPR:
13455 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13456 return fold_build2_loc (loc, EQ_EXPR, type,
13457 fold_convert_loc (loc, TREE_TYPE (arg1),
13458 arg0),
13459 arg1);
13460 default:
13461 break;
13464 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13465 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13466 && TYPE_UNSIGNED (arg1_type)
13467 /* We will flip the signedness of the comparison operator
13468 associated with the mode of arg1, so the sign bit is
13469 specified by this mode. Check that arg1 is the signed
13470 max associated with this sign bit. */
13471 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13472 /* signed_type does not work on pointer types. */
13473 && INTEGRAL_TYPE_P (arg1_type))
13475 /* The following case also applies to X < signed_max+1
13476 and X >= signed_max+1 because of previous transformations. */
13477 if (code == LE_EXPR || code == GT_EXPR)
13479 tree st;
13480 st = signed_type_for (TREE_TYPE (arg1));
13481 return fold_build2_loc (loc,
13482 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13483 type, fold_convert_loc (loc, st, arg0),
13484 build_int_cst (st, 0));
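/* E.g., for unsigned 32-bit u, u > 0x7fffffff ==> (int) u < 0 and
   u <= 0x7fffffff ==> (int) u >= 0: comparing against the signed
   maximum is just a sign-bit test in the signed type.  */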
13490 /* If we are comparing an ABS_EXPR with a constant, we can
13491 convert all the cases into explicit comparisons, but they may
13492 well not be faster than doing the ABS and one comparison.
13493 But ABS (X) <= C is a range comparison, which becomes a subtraction
13494 and a comparison, and is probably faster. */
13495 if (code == LE_EXPR
13496 && TREE_CODE (arg1) == INTEGER_CST
13497 && TREE_CODE (arg0) == ABS_EXPR
13498 && ! TREE_SIDE_EFFECTS (arg0)
13499 && (0 != (tem = negate_expr (arg1)))
13500 && TREE_CODE (tem) == INTEGER_CST
13501 && !TREE_OVERFLOW (tem))
13502 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13503 build2 (GE_EXPR, type,
13504 TREE_OPERAND (arg0, 0), tem),
13505 build2 (LE_EXPR, type,
13506 TREE_OPERAND (arg0, 0), arg1));
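/* E.g., abs (x) <= 5 ==> x >= -5 && x <= 5, which the range-test
   machinery can then collapse into a single unsigned comparison.  */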
13508 /* Convert ABS_EXPR<x> >= 0 to true. */
13509 strict_overflow_p = false;
13510 if (code == GE_EXPR
13511 && (integer_zerop (arg1)
13512 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13513 && real_zerop (arg1)))
13514 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13516 if (strict_overflow_p)
13517 fold_overflow_warning (("assuming signed overflow does not occur "
13518 "when simplifying comparison of "
13519 "absolute value and zero"),
13520 WARN_STRICT_OVERFLOW_CONDITIONAL);
13521 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13524 /* Convert ABS_EXPR<x> < 0 to false. */
13525 strict_overflow_p = false;
13526 if (code == LT_EXPR
13527 && (integer_zerop (arg1) || real_zerop (arg1))
13528 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13530 if (strict_overflow_p)
13531 fold_overflow_warning (("assuming signed overflow does not occur "
13532 "when simplifying comparison of "
13533 "absolute value and zero"),
13534 WARN_STRICT_OVERFLOW_CONDITIONAL);
13535 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13538 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13539 and similarly for >= into !=. */
13540 if ((code == LT_EXPR || code == GE_EXPR)
13541 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13542 && TREE_CODE (arg1) == LSHIFT_EXPR
13543 && integer_onep (TREE_OPERAND (arg1, 0)))
13545 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13546 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13547 TREE_OPERAND (arg1, 1)),
13548 build_int_cst (TREE_TYPE (arg0), 0));
13549 goto fold_binary_exit;
13552 if ((code == LT_EXPR || code == GE_EXPR)
13553 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13554 && CONVERT_EXPR_P (arg1)
13555 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13556 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13558 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13559 fold_convert_loc (loc, TREE_TYPE (arg0),
13560 build2 (RSHIFT_EXPR,
13561 TREE_TYPE (arg0), arg0,
13562 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13563 1))),
13564 build_int_cst (TREE_TYPE (arg0), 0));
13565 goto fold_binary_exit;
13568 return NULL_TREE;
13570 case UNORDERED_EXPR:
13571 case ORDERED_EXPR:
13572 case UNLT_EXPR:
13573 case UNLE_EXPR:
13574 case UNGT_EXPR:
13575 case UNGE_EXPR:
13576 case UNEQ_EXPR:
13577 case LTGT_EXPR:
13578 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13580 t1 = fold_relational_const (code, type, arg0, arg1);
13581 if (t1 != NULL_TREE)
13582 return t1;
13585 /* If the first operand is NaN, the result is constant. */
13586 if (TREE_CODE (arg0) == REAL_CST
13587 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13588 && (code != LTGT_EXPR || ! flag_trapping_math))
13590 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13591 ? integer_zero_node
13592 : integer_one_node;
13593 return omit_one_operand_loc (loc, type, t1, arg1);
13596 /* If the second operand is NaN, the result is constant. */
13597 if (TREE_CODE (arg1) == REAL_CST
13598 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13599 && (code != LTGT_EXPR || ! flag_trapping_math))
13601 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13602 ? integer_zero_node
13603 : integer_one_node;
13604 return omit_one_operand_loc (loc, type, t1, arg0);
13607 /* Simplify unordered comparison of something with itself. */
13608 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13609 && operand_equal_p (arg0, arg1, 0))
13610 return constant_boolean_node (1, type);
13612 if (code == LTGT_EXPR
13613 && !flag_trapping_math
13614 && operand_equal_p (arg0, arg1, 0))
13615 return constant_boolean_node (0, type);
13617 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13619 tree targ0 = strip_float_extensions (arg0);
13620 tree targ1 = strip_float_extensions (arg1);
13621 tree newtype = TREE_TYPE (targ0);
13623 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13624 newtype = TREE_TYPE (targ1);
13626 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13627 return fold_build2_loc (loc, code, type,
13628 fold_convert_loc (loc, newtype, targ0),
13629 fold_convert_loc (loc, newtype, targ1));
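/* E.g., for float f, g: (double) f < (double) g ==> f < g.  The
   widening conversion is exact, so comparing in the narrower common
   type gives the same result.  */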
13632 return NULL_TREE;
13634 case COMPOUND_EXPR:
13635 /* When pedantic, a compound expression can be neither an lvalue
13636 nor an integer constant expression. */
13637 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13638 return NULL_TREE;
13639 /* Don't let (0, 0) be a null pointer constant. */
13640 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13641 : fold_convert_loc (loc, type, arg1);
13642 return pedantic_non_lvalue_loc (loc, tem);
13644 case COMPLEX_EXPR:
13645 if ((TREE_CODE (arg0) == REAL_CST
13646 && TREE_CODE (arg1) == REAL_CST)
13647 || (TREE_CODE (arg0) == INTEGER_CST
13648 && TREE_CODE (arg1) == INTEGER_CST))
13649 return build_complex (type, arg0, arg1);
13650 return NULL_TREE;
13652 case ASSERT_EXPR:
13653 /* An ASSERT_EXPR should never be passed to fold_binary. */
13654 gcc_unreachable ();
13656 default:
13657 return NULL_TREE;
13658 } /* switch (code) */
13659 fold_binary_exit:
13660 protected_set_expr_location (tem, loc);
13661 return tem;
13664 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13665 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13666 of GOTO_EXPR. */
13668 static tree
13669 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13671 switch (TREE_CODE (*tp))
13673 case LABEL_EXPR:
13674 return *tp;
13676 case GOTO_EXPR:
13677 *walk_subtrees = 0;
13679 /* ... fall through ... */
13681 default:
13682 return NULL_TREE;
13686 /* Return whether the sub-tree ST contains a label which is accessible from
13687 outside the sub-tree. */
13689 static bool
13690 contains_label_p (tree st)
13692 return
13693 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13696 /* Fold a ternary expression of code CODE and type TYPE with operands
13697 OP0, OP1, and OP2. Return the folded expression if folding is
13698 successful. Otherwise, return NULL_TREE. */
13700 tree
13701 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13702 tree op0, tree op1, tree op2)
13704 tree tem;
13705 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13706 enum tree_code_class kind = TREE_CODE_CLASS (code);
13708 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13709 && TREE_CODE_LENGTH (code) == 3);
13711 /* Strip any conversions that don't change the mode. This is safe
13712 for every expression, except for a comparison expression because
13713 its signedness is derived from its operands. So, in the latter
13714 case, only strip conversions that don't change the signedness.
13716 Note that this is done as an internal manipulation within the
13717 constant folder, in order to find the simplest representation of
13718 the arguments so that their form can be studied. In any case,
13719 the appropriate type conversions should be put back in the tree
13720 that will get out of the constant folder. */
13721 if (op0)
13723 arg0 = op0;
13724 STRIP_NOPS (arg0);
13727 if (op1)
13729 arg1 = op1;
13730 STRIP_NOPS (arg1);
13733 switch (code)
13735 case COMPONENT_REF:
13736 if (TREE_CODE (arg0) == CONSTRUCTOR
13737 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13739 unsigned HOST_WIDE_INT idx;
13740 tree field, value;
13741 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13742 if (field == arg1)
13743 return value;
13745 return NULL_TREE;
13747 case COND_EXPR:
13748 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13749 so all simple results must be passed through pedantic_non_lvalue. */
13750 if (TREE_CODE (arg0) == INTEGER_CST)
13752 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13753 tem = integer_zerop (arg0) ? op2 : op1;
13754 /* Only optimize constant conditions when the selected branch
13755 has the same type as the COND_EXPR. This avoids optimizing
13756 away "c ? x : throw", where the throw has a void type.
13757 Avoid throwing away an operand that contains a label. */
13758 if ((!TREE_SIDE_EFFECTS (unused_op)
13759 || !contains_label_p (unused_op))
13760 && (! VOID_TYPE_P (TREE_TYPE (tem))
13761 || VOID_TYPE_P (type)))
13762 return pedantic_non_lvalue_loc (loc, tem);
13763 return NULL_TREE;
13765 if (operand_equal_p (arg1, op2, 0))
13766 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13768 /* If we have A op B ? A : C, we may be able to convert this to a
13769 simpler expression, depending on the operation and the values
13770 of B and C. Signed zeros prevent all of these transformations,
13771 for reasons given above each one.
13773 Also try swapping the arguments and inverting the conditional. */
13774 if (COMPARISON_CLASS_P (arg0)
13775 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13776 arg1, TREE_OPERAND (arg0, 1))
13777 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13779 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13780 if (tem)
13781 return tem;
13784 if (COMPARISON_CLASS_P (arg0)
13785 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13786 op2,
13787 TREE_OPERAND (arg0, 1))
13788 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13790 tem = fold_truth_not_expr (loc, arg0);
13791 if (tem && COMPARISON_CLASS_P (tem))
13793 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13794 if (tem)
13795 return tem;
13799 /* If the second operand is simpler than the third, swap them
13800 since that produces better jump optimization results. */
13801 if (truth_value_p (TREE_CODE (arg0))
13802 && tree_swap_operands_p (op1, op2, false))
13804 /* See if this can be inverted. If it can't, possibly because
13805 it was a floating-point inequality comparison, don't do
13806 anything. */
13807 tem = fold_truth_not_expr (loc, arg0);
13808 if (tem)
13809 return fold_build3_loc (loc, code, type, tem, op2, op1);
13812 /* Convert A ? 1 : 0 to simply A. */
13813 if (integer_onep (op1)
13814 && integer_zerop (op2)
13815 /* If we try to convert OP0 to our type, the
13816 call to fold will try to move the conversion inside
13817 a COND, which will recurse. In that case, the COND_EXPR
13818 is probably the best choice, so leave it alone. */
13819 && type == TREE_TYPE (arg0))
13820 return pedantic_non_lvalue_loc (loc, arg0);
13822 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13823 over COND_EXPR in cases such as floating point comparisons. */
13824 if (integer_zerop (op1)
13825 && integer_onep (op2)
13826 && truth_value_p (TREE_CODE (arg0)))
13827 return pedantic_non_lvalue_loc (loc,
13828 fold_convert_loc (loc, type,
13829 invert_truthvalue_loc (loc,
13830 arg0)));
13832 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13833 if (TREE_CODE (arg0) == LT_EXPR
13834 && integer_zerop (TREE_OPERAND (arg0, 1))
13835 && integer_zerop (op2)
13836 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13838 /* sign_bit_p only checks ARG1 bits within A's precision.
13839 If <sign bit of A> has wider type than A, bits outside
13840 of A's precision in <sign bit of A> need to be checked.
13841 If they are all 0, this optimization needs to be done
13842 in unsigned A's type; if they are all 1, in signed A's type;
13843 otherwise this can't be done. */
13844 if (TYPE_PRECISION (TREE_TYPE (tem))
13845 < TYPE_PRECISION (TREE_TYPE (arg1))
13846 && TYPE_PRECISION (TREE_TYPE (tem))
13847 < TYPE_PRECISION (type))
13849 unsigned HOST_WIDE_INT mask_lo;
13850 HOST_WIDE_INT mask_hi;
13851 int inner_width, outer_width;
13852 tree tem_type;
13854 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13855 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13856 if (outer_width > TYPE_PRECISION (type))
13857 outer_width = TYPE_PRECISION (type);
13859 if (outer_width > HOST_BITS_PER_WIDE_INT)
13861 mask_hi = ((unsigned HOST_WIDE_INT) -1
13862 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13863 mask_lo = -1;
13865 else
13867 mask_hi = 0;
13868 mask_lo = ((unsigned HOST_WIDE_INT) -1
13869 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13871 if (inner_width > HOST_BITS_PER_WIDE_INT)
13873 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13874 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13875 mask_lo = 0;
13877 else
13878 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13879 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13881 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13882 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13884 tem_type = signed_type_for (TREE_TYPE (tem));
13885 tem = fold_convert_loc (loc, tem_type, tem);
13887 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13888 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13890 tem_type = unsigned_type_for (TREE_TYPE (tem));
13891 tem = fold_convert_loc (loc, tem_type, tem);
13893 else
13894 tem = NULL;
13897 if (tem)
13898 return
13899 fold_convert_loc (loc, type,
13900 fold_build2_loc (loc, BIT_AND_EXPR,
13901 TREE_TYPE (tem), tem,
13902 fold_convert_loc (loc,
13903 TREE_TYPE (tem),
13904 arg1)));
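/* E.g., for a 32-bit int x, x < 0 ? 0x80000000 : 0 ==> x & 0x80000000,
   selecting the sign bit directly instead of branching.  */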
13907 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13908 already handled above. */
13909 if (TREE_CODE (arg0) == BIT_AND_EXPR
13910 && integer_onep (TREE_OPERAND (arg0, 1))
13911 && integer_zerop (op2)
13912 && integer_pow2p (arg1))
13914 tree tem = TREE_OPERAND (arg0, 0);
13915 STRIP_NOPS (tem);
13916 if (TREE_CODE (tem) == RSHIFT_EXPR
13917 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13918 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13919 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13920 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13921 TREE_OPERAND (tem, 0), arg1);
13924 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13925 is probably obsolete because the first operand should be a
13926 truth value (that's why we have the two cases above), but let's
13927 leave it in until we can confirm this for all front-ends. */
13928 if (integer_zerop (op2)
13929 && TREE_CODE (arg0) == NE_EXPR
13930 && integer_zerop (TREE_OPERAND (arg0, 1))
13931 && integer_pow2p (arg1)
13932 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13933 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13934 arg1, OEP_ONLY_CONST))
13935 return pedantic_non_lvalue_loc (loc,
13936 fold_convert_loc (loc, type,
13937 TREE_OPERAND (arg0, 0)));
13939 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13940 if (integer_zerop (op2)
13941 && truth_value_p (TREE_CODE (arg0))
13942 && truth_value_p (TREE_CODE (arg1)))
13943 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13944 fold_convert_loc (loc, type, arg0),
13945 arg1);
13947 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13948 if (integer_onep (op2)
13949 && truth_value_p (TREE_CODE (arg0))
13950 && truth_value_p (TREE_CODE (arg1)))
13952 /* Only perform transformation if ARG0 is easily inverted. */
13953 tem = fold_truth_not_expr (loc, arg0);
13954 if (tem)
13955 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13956 fold_convert_loc (loc, type, tem),
13957 arg1);
13960 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13961 if (integer_zerop (arg1)
13962 && truth_value_p (TREE_CODE (arg0))
13963 && truth_value_p (TREE_CODE (op2)))
13965 /* Only perform transformation if ARG0 is easily inverted. */
13966 tem = fold_truth_not_expr (loc, arg0);
13967 if (tem)
13968 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13969 fold_convert_loc (loc, type, tem),
13970 op2);
13973 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13974 if (integer_onep (arg1)
13975 && truth_value_p (TREE_CODE (arg0))
13976 && truth_value_p (TREE_CODE (op2)))
13977 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13978 fold_convert_loc (loc, type, arg0),
13979 op2);
13981 return NULL_TREE;
13983 case CALL_EXPR:
13984 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13985 of fold_ternary on them. */
13986 gcc_unreachable ();
13988 case BIT_FIELD_REF:
13989 if ((TREE_CODE (arg0) == VECTOR_CST
13990 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13991 && type == TREE_TYPE (TREE_TYPE (arg0)))
13993 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13994 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13996 if (width != 0
13997 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13998 && (idx % width) == 0
13999 && (idx = idx / width)
14000 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14002 tree elements = NULL_TREE;
14004 if (TREE_CODE (arg0) == VECTOR_CST)
14005 elements = TREE_VECTOR_CST_ELTS (arg0);
14006 else
14008 unsigned HOST_WIDE_INT idx;
14009 tree value;
14011 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
14012 elements = tree_cons (NULL_TREE, value, elements);
14014 while (idx-- > 0 && elements)
14015 elements = TREE_CHAIN (elements);
14016 if (elements)
14017 return TREE_VALUE (elements);
14018 else
14019 return fold_convert_loc (loc, type, integer_zero_node);
14023 /* A bit-field-ref that referenced the full argument can be stripped. */
14024 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14025 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14026 && integer_zerop (op2))
14027 return fold_convert_loc (loc, type, arg0);
14029 return NULL_TREE;
14031 default:
14032 return NULL_TREE;
14033 } /* switch (code) */
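/* Illustrative sketch (EXAMPLE_FOLD_COND_AND is hypothetical): the
   COND_EXPR cases above rewrite A ? B : 0 as A && B when both A and B
   are truth values, e.g. comparisons.  */

static tree
example_fold_cond_and (location_t loc, tree a, tree b)
{
  /* For truth-valued A and B this folds to TRUTH_ANDIF_EXPR <A, B>;
     otherwise a COND_EXPR is built.  */
  return fold_build3_loc (loc, COND_EXPR, boolean_type_node,
			  a, b, boolean_false_node);
}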
14036 /* Perform constant folding and related simplification of EXPR.
14037 The related simplifications include x*1 => x, x*0 => 0, etc.,
14038 and application of the associative law.
14039 NOP_EXPR conversions may be removed freely (as long as we
14040 are careful not to change the type of the overall expression).
14041 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14042 but we can constant-fold them if they have constant operands. */
14044 #ifdef ENABLE_FOLD_CHECKING
14045 # define fold(x) fold_1 (x)
14046 static tree fold_1 (tree);
14047 static
14048 #endif
14049 tree
14050 fold (tree expr)
14052 const tree t = expr;
14053 enum tree_code code = TREE_CODE (t);
14054 enum tree_code_class kind = TREE_CODE_CLASS (code);
14055 tree tem;
14056 location_t loc = EXPR_LOCATION (expr);
14058 /* Return right away if a constant. */
14059 if (kind == tcc_constant)
14060 return t;
14062 /* CALL_EXPR-like objects with variable numbers of operands are
14063 treated specially. */
14064 if (kind == tcc_vl_exp)
14066 if (code == CALL_EXPR)
14068 tem = fold_call_expr (loc, expr, false);
14069 return tem ? tem : expr;
14071 return expr;
14074 if (IS_EXPR_CODE_CLASS (kind))
14076 tree type = TREE_TYPE (t);
14077 tree op0, op1, op2;
14079 switch (TREE_CODE_LENGTH (code))
14081 case 1:
14082 op0 = TREE_OPERAND (t, 0);
14083 tem = fold_unary_loc (loc, code, type, op0);
14084 return tem ? tem : expr;
14085 case 2:
14086 op0 = TREE_OPERAND (t, 0);
14087 op1 = TREE_OPERAND (t, 1);
14088 tem = fold_binary_loc (loc, code, type, op0, op1);
14089 return tem ? tem : expr;
14090 case 3:
14091 op0 = TREE_OPERAND (t, 0);
14092 op1 = TREE_OPERAND (t, 1);
14093 op2 = TREE_OPERAND (t, 2);
14094 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14095 return tem ? tem : expr;
14096 default:
14097 break;
14101 switch (code)
14103 case ARRAY_REF:
14105 tree op0 = TREE_OPERAND (t, 0);
14106 tree op1 = TREE_OPERAND (t, 1);
14108 if (TREE_CODE (op1) == INTEGER_CST
14109 && TREE_CODE (op0) == CONSTRUCTOR
14110 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14112 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14113 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14114 unsigned HOST_WIDE_INT begin = 0;
14116 /* Find a matching index by means of a binary search. */
14117 while (begin != end)
14119 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14120 tree index = VEC_index (constructor_elt, elts, middle)->index;
14122 if (TREE_CODE (index) == INTEGER_CST
14123 && tree_int_cst_lt (index, op1))
14124 begin = middle + 1;
14125 else if (TREE_CODE (index) == INTEGER_CST
14126 && tree_int_cst_lt (op1, index))
14127 end = middle;
14128 else if (TREE_CODE (index) == RANGE_EXPR
14129 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14130 begin = middle + 1;
14131 else if (TREE_CODE (index) == RANGE_EXPR
14132 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14133 end = middle;
14134 else
14135 return VEC_index (constructor_elt, elts, middle)->value;
14139 return t;
14142 case CONST_DECL:
14143 return fold (DECL_INITIAL (t));
14145 default:
14146 return t;
14147 } /* switch (code) */
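/* Illustrative sketch (EXAMPLE_FOLD_CONSTANT_SUM is hypothetical):
   fold collapses a freshly built constant expression, here 2 + 3 into
   the INTEGER_CST 5; trees it cannot simplify come back unchanged.  */

static tree
example_fold_constant_sum (void)
{
  tree type = integer_type_node;
  tree sum = build2 (PLUS_EXPR, type,
		     build_int_cst (type, 2),
		     build_int_cst (type, 3));
  return fold (sum);
}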
14150 #ifdef ENABLE_FOLD_CHECKING
14151 #undef fold
14153 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14154 static void fold_check_failed (const_tree, const_tree);
14155 void print_fold_checksum (const_tree);
14157 /* When --enable-checking=fold is in effect, compute a digest of EXPR
14158 before and after the actual fold call to verify that fold did not
14159 accidentally change the original EXPR. */
14161 tree
14162 fold (tree expr)
14164 tree ret;
14165 struct md5_ctx ctx;
14166 unsigned char checksum_before[16], checksum_after[16];
14167 htab_t ht;
14169 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14170 md5_init_ctx (&ctx);
14171 fold_checksum_tree (expr, &ctx, ht);
14172 md5_finish_ctx (&ctx, checksum_before);
14173 htab_empty (ht);
14175 ret = fold_1 (expr);
14177 md5_init_ctx (&ctx);
14178 fold_checksum_tree (expr, &ctx, ht);
14179 md5_finish_ctx (&ctx, checksum_after);
14180 htab_delete (ht);
14182 if (memcmp (checksum_before, checksum_after, 16))
14183 fold_check_failed (expr, ret);
14185 return ret;
14188 void
14189 print_fold_checksum (const_tree expr)
14191 struct md5_ctx ctx;
14192 unsigned char checksum[16], cnt;
14193 htab_t ht;
14195 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14196 md5_init_ctx (&ctx);
14197 fold_checksum_tree (expr, &ctx, ht);
14198 md5_finish_ctx (&ctx, checksum);
14199 htab_delete (ht);
14200 for (cnt = 0; cnt < 16; ++cnt)
14201 fprintf (stderr, "%02x", checksum[cnt]);
14202 putc ('\n', stderr);
14205 static void
14206 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14208 internal_error ("fold check: original tree changed by fold");
14211 static void
14212 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14214 const void **slot;
14215 enum tree_code code;
14216 union tree_node buf;
14217 int i, len;
14219 recursive_label:
14221 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14222 <= sizeof (struct tree_function_decl))
14223 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14224 if (expr == NULL)
14225 return;
14226 slot = (const void **) htab_find_slot (ht, expr, INSERT);
14227 if (*slot != NULL)
14228 return;
14229 *slot = expr;
14230 code = TREE_CODE (expr);
14231 if (TREE_CODE_CLASS (code) == tcc_declaration
14232 && DECL_ASSEMBLER_NAME_SET_P (expr))
14234 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14235 memcpy ((char *) &buf, expr, tree_size (expr));
14236 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14237 expr = (tree) &buf;
14239 else if (TREE_CODE_CLASS (code) == tcc_type
14240 && (TYPE_POINTER_TO (expr)
14241 || TYPE_REFERENCE_TO (expr)
14242 || TYPE_CACHED_VALUES_P (expr)
14243 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14244 || TYPE_NEXT_VARIANT (expr)))
14246 /* Allow these fields to be modified. */
14247 tree tmp;
14248 memcpy ((char *) &buf, expr, tree_size (expr));
14249 expr = tmp = (tree) &buf;
14250 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14251 TYPE_POINTER_TO (tmp) = NULL;
14252 TYPE_REFERENCE_TO (tmp) = NULL;
14253 TYPE_NEXT_VARIANT (tmp) = NULL;
14254 if (TYPE_CACHED_VALUES_P (tmp))
14256 TYPE_CACHED_VALUES_P (tmp) = 0;
14257 TYPE_CACHED_VALUES (tmp) = NULL;
14260 md5_process_bytes (expr, tree_size (expr), ctx);
14261 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14262 if (TREE_CODE_CLASS (code) != tcc_type
14263 && TREE_CODE_CLASS (code) != tcc_declaration
14264 && code != TREE_LIST
14265 && code != SSA_NAME)
14266 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14267 switch (TREE_CODE_CLASS (code))
14269 case tcc_constant:
14270 switch (code)
14272 case STRING_CST:
14273 md5_process_bytes (TREE_STRING_POINTER (expr),
14274 TREE_STRING_LENGTH (expr), ctx);
14275 break;
14276 case COMPLEX_CST:
14277 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14278 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14279 break;
14280 case VECTOR_CST:
14281 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14282 break;
14283 default:
14284 break;
14286 break;
14287 case tcc_exceptional:
14288 switch (code)
14290 case TREE_LIST:
14291 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14292 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14293 expr = TREE_CHAIN (expr);
14294 goto recursive_label;
14295 break;
14296 case TREE_VEC:
14297 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14298 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14299 break;
14300 default:
14301 break;
14303 break;
14304 case tcc_expression:
14305 case tcc_reference:
14306 case tcc_comparison:
14307 case tcc_unary:
14308 case tcc_binary:
14309 case tcc_statement:
14310 case tcc_vl_exp:
14311 len = TREE_OPERAND_LENGTH (expr);
14312 for (i = 0; i < len; ++i)
14313 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14314 break;
14315 case tcc_declaration:
14316 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14317 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14318 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14320 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14321 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14322 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14323 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14324 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14326 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14327 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14329 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14331 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14332 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14333 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14335 break;
14336 case tcc_type:
14337 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14338 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14339 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14340 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14341 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14342 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14343 if (INTEGRAL_TYPE_P (expr)
14344 || SCALAR_FLOAT_TYPE_P (expr))
14346 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14347 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14349 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14350 if (TREE_CODE (expr) == RECORD_TYPE
14351 || TREE_CODE (expr) == UNION_TYPE
14352 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14353 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14354 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14355 break;
14356 default:
14357 break;
14361 /* Helper function for outputting the checksum of a tree T. When
14362 debugging with gdb, you can "define mynext" to be "next" followed
14363 by "call debug_fold_checksum (op0)", then just trace down till the
14364 outputs differ. */
14366 void
14367 debug_fold_checksum (const_tree t)
14369 int i;
14370 unsigned char checksum[16];
14371 struct md5_ctx ctx;
14372 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14374 md5_init_ctx (&ctx);
14375 fold_checksum_tree (t, &ctx, ht);
14376 md5_finish_ctx (&ctx, checksum);
14377 htab_empty (ht);
14379 for (i = 0; i < 16; i++)
14380 fprintf (stderr, "%d ", checksum[i]);
14382 fprintf (stderr, "\n");
14385 #endif
14387 /* Fold a unary tree expression with code CODE of type TYPE with an
14388 operand OP0. LOC is the location of the resulting expression.
14389 Return a folded expression if successful. Otherwise, return a tree
14390 expression with code CODE of type TYPE with an operand OP0. */
14392 tree
14393 fold_build1_stat_loc (location_t loc,
14394 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14396 tree tem;
14397 #ifdef ENABLE_FOLD_CHECKING
14398 unsigned char checksum_before[16], checksum_after[16];
14399 struct md5_ctx ctx;
14400 htab_t ht;
14402 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14403 md5_init_ctx (&ctx);
14404 fold_checksum_tree (op0, &ctx, ht);
14405 md5_finish_ctx (&ctx, checksum_before);
14406 htab_empty (ht);
14407 #endif
14409 tem = fold_unary_loc (loc, code, type, op0);
14410 if (!tem)
14412 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14413 SET_EXPR_LOCATION (tem, loc);
14416 #ifdef ENABLE_FOLD_CHECKING
14417 md5_init_ctx (&ctx);
14418 fold_checksum_tree (op0, &ctx, ht);
14419 md5_finish_ctx (&ctx, checksum_after);
14420 htab_delete (ht);
14422 if (memcmp (checksum_before, checksum_after, 16))
14423 fold_check_failed (op0, tem);
14424 #endif
14425 return tem;
14428 /* Fold a binary tree expression with code CODE of type TYPE with
14429 operands OP0 and OP1. LOC is the location of the resulting
14430 expression. Return a folded expression if successful. Otherwise,
14431 return a tree expression with code CODE of type TYPE with operands
14432 OP0 and OP1. */
14434 tree
14435 fold_build2_stat_loc (location_t loc,
14436 enum tree_code code, tree type, tree op0, tree op1
14437 MEM_STAT_DECL)
14439 tree tem;
14440 #ifdef ENABLE_FOLD_CHECKING
14441 unsigned char checksum_before_op0[16],
14442 checksum_before_op1[16],
14443 checksum_after_op0[16],
14444 checksum_after_op1[16];
14445 struct md5_ctx ctx;
14446 htab_t ht;
14448 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14449 md5_init_ctx (&ctx);
14450 fold_checksum_tree (op0, &ctx, ht);
14451 md5_finish_ctx (&ctx, checksum_before_op0);
14452 htab_empty (ht);
14454 md5_init_ctx (&ctx);
14455 fold_checksum_tree (op1, &ctx, ht);
14456 md5_finish_ctx (&ctx, checksum_before_op1);
14457 htab_empty (ht);
14458 #endif
14460 tem = fold_binary_loc (loc, code, type, op0, op1);
14461 if (!tem)
14463 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14464 SET_EXPR_LOCATION (tem, loc);
14467 #ifdef ENABLE_FOLD_CHECKING
14468 md5_init_ctx (&ctx);
14469 fold_checksum_tree (op0, &ctx, ht);
14470 md5_finish_ctx (&ctx, checksum_after_op0);
14471 htab_empty (ht);
14473 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14474 fold_check_failed (op0, tem);
14476 md5_init_ctx (&ctx);
14477 fold_checksum_tree (op1, &ctx, ht);
14478 md5_finish_ctx (&ctx, checksum_after_op1);
14479 htab_delete (ht);
14481 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14482 fold_check_failed (op1, tem);
14483 #endif
14484 return tem;
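/* Illustrative sketch (EXAMPLE_BUILD_SUM is hypothetical): unlike
   fold_binary_loc, fold_build2_loc never returns NULL_TREE, so the
   caller need not rebuild the expression when no folding applies.  */

static tree
example_build_sum (location_t loc, tree type, tree x)
{
  /* X + 0 simplifies to X; otherwise a PLUS_EXPR carrying LOC is
     built.  */
  return fold_build2_loc (loc, PLUS_EXPR, type, x,
			  build_int_cst (type, 0));
}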
14487 /* Fold a ternary tree expression with code CODE of type TYPE with
14488 operands OP0, OP1, and OP2. Return a folded expression if
14489 successful. Otherwise, return a tree expression with code CODE of
14490 type TYPE with operands OP0, OP1, and OP2. */
14492 tree
14493 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14494 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14496 tree tem;
14497 #ifdef ENABLE_FOLD_CHECKING
14498 unsigned char checksum_before_op0[16],
14499 checksum_before_op1[16],
14500 checksum_before_op2[16],
14501 checksum_after_op0[16],
14502 checksum_after_op1[16],
14503 checksum_after_op2[16];
14504 struct md5_ctx ctx;
14505 htab_t ht;
14507 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14508 md5_init_ctx (&ctx);
14509 fold_checksum_tree (op0, &ctx, ht);
14510 md5_finish_ctx (&ctx, checksum_before_op0);
14511 htab_empty (ht);
14513 md5_init_ctx (&ctx);
14514 fold_checksum_tree (op1, &ctx, ht);
14515 md5_finish_ctx (&ctx, checksum_before_op1);
14516 htab_empty (ht);
14518 md5_init_ctx (&ctx);
14519 fold_checksum_tree (op2, &ctx, ht);
14520 md5_finish_ctx (&ctx, checksum_before_op2);
14521 htab_empty (ht);
14522 #endif
14524 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14525 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14526 if (!tem)
14528 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14529 SET_EXPR_LOCATION (tem, loc);
14532 #ifdef ENABLE_FOLD_CHECKING
14533 md5_init_ctx (&ctx);
14534 fold_checksum_tree (op0, &ctx, ht);
14535 md5_finish_ctx (&ctx, checksum_after_op0);
14536 htab_empty (ht);
14538 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14539 fold_check_failed (op0, tem);
14541 md5_init_ctx (&ctx);
14542 fold_checksum_tree (op1, &ctx, ht);
14543 md5_finish_ctx (&ctx, checksum_after_op1);
14544 htab_empty (ht);
14546 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14547 fold_check_failed (op1, tem);
14549 md5_init_ctx (&ctx);
14550 fold_checksum_tree (op2, &ctx, ht);
14551 md5_finish_ctx (&ctx, checksum_after_op2);
14552 htab_delete (ht);
14554 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14555 fold_check_failed (op2, tem);
14556 #endif
14557 return tem;
14560 /* Fold a CALL_EXPR of type TYPE with function FN, the NARGS
14561 arguments in ARGARRAY, and a null static chain.
14562 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14563 of type TYPE from the given operands as constructed by build_call_array. */
14565 tree
14566 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14567 int nargs, tree *argarray)
14569 tree tem;
14570 #ifdef ENABLE_FOLD_CHECKING
14571 unsigned char checksum_before_fn[16],
14572 checksum_before_arglist[16],
14573 checksum_after_fn[16],
14574 checksum_after_arglist[16];
14575 struct md5_ctx ctx;
14576 htab_t ht;
14577 int i;
14579 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14580 md5_init_ctx (&ctx);
14581 fold_checksum_tree (fn, &ctx, ht);
14582 md5_finish_ctx (&ctx, checksum_before_fn);
14583 htab_empty (ht);
14585 md5_init_ctx (&ctx);
14586 for (i = 0; i < nargs; i++)
14587 fold_checksum_tree (argarray[i], &ctx, ht);
14588 md5_finish_ctx (&ctx, checksum_before_arglist);
14589 htab_empty (ht);
14590 #endif
14592 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14594 #ifdef ENABLE_FOLD_CHECKING
14595 md5_init_ctx (&ctx);
14596 fold_checksum_tree (fn, &ctx, ht);
14597 md5_finish_ctx (&ctx, checksum_after_fn);
14598 htab_empty (ht);
14600 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14601 fold_check_failed (fn, tem);
14603 md5_init_ctx (&ctx);
14604 for (i = 0; i < nargs; i++)
14605 fold_checksum_tree (argarray[i], &ctx, ht);
14606 md5_finish_ctx (&ctx, checksum_after_arglist);
14607 htab_delete (ht);
14609 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14610 fold_check_failed (NULL_TREE, tem);
14611 #endif
14612 return tem;
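/* Illustrative sketch (EXAMPLE_FOLD_FABS is hypothetical, and assumes
   the built_in_decls entry for BUILT_IN_FABS is populated): a call to
   fabs with a constant argument folds to a REAL_CST instead of a
   CALL_EXPR.  */

static tree
example_fold_fabs (location_t loc, tree arg)
{
  tree fndecl = built_in_decls[BUILT_IN_FABS];
  tree fn = build_fold_addr_expr_loc (loc, fndecl);
  return fold_build_call_array_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				    fn, 1, &arg);
}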
14615 /* Perform constant folding and related simplification of initializer
14616 expression EXPR. These behave identically to "fold_buildN" but ignore
14617 potential run-time traps and exceptions that fold must preserve. */
14619 #define START_FOLD_INIT \
14620 int saved_signaling_nans = flag_signaling_nans;\
14621 int saved_trapping_math = flag_trapping_math;\
14622 int saved_rounding_math = flag_rounding_math;\
14623 int saved_trapv = flag_trapv;\
14624 int saved_folding_initializer = folding_initializer;\
14625 flag_signaling_nans = 0;\
14626 flag_trapping_math = 0;\
14627 flag_rounding_math = 0;\
14628 flag_trapv = 0;\
14629 folding_initializer = 1;
14631 #define END_FOLD_INIT \
14632 flag_signaling_nans = saved_signaling_nans;\
14633 flag_trapping_math = saved_trapping_math;\
14634 flag_rounding_math = saved_rounding_math;\
14635 flag_trapv = saved_trapv;\
14636 folding_initializer = saved_folding_initializer;
14638 tree
14639 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14640 tree type, tree op)
14642 tree result;
14643 START_FOLD_INIT;
14645 result = fold_build1_loc (loc, code, type, op);
14647 END_FOLD_INIT;
14648 return result;
14651 tree
14652 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14653 tree type, tree op0, tree op1)
14655 tree result;
14656 START_FOLD_INIT;
14658 result = fold_build2_loc (loc, code, type, op0, op1);
14660 END_FOLD_INIT;
14661 return result;
14664 tree
14665 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14666 tree type, tree op0, tree op1, tree op2)
14668 tree result;
14669 START_FOLD_INIT;
14671 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14673 END_FOLD_INIT;
14674 return result;
14677 tree
14678 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14679 int nargs, tree *argarray)
14681 tree result;
14682 START_FOLD_INIT;
14684 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14686 END_FOLD_INIT;
14687 return result;
14690 #undef START_FOLD_INIT
14691 #undef END_FOLD_INIT
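/* Illustrative sketch (EXAMPLE_FOLD_INIT_DIV is hypothetical): inside
   a static initializer a floating division may be folded even though
   fold would normally preserve it as a potential run-time trap.  */

static tree
example_fold_init_div (location_t loc, tree type, tree num, tree den)
{
  return fold_build2_initializer_loc (loc, RDIV_EXPR, type, num, den);
}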
14693 /* Determine whether the first argument is a multiple of the second. Return 0 if
14694 it is not, or if we cannot easily determine that it is.
14696 An example of the sort of thing we care about (at this point; this routine
14697 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14698 fold cases do now) is discovering that
14700 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14702 is a multiple of
14704 SAVE_EXPR (J * 8)
14706 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14708 This code also handles discovering that
14710 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14712 is a multiple of 8 so we don't have to worry about dealing with a
14713 possible remainder.
14715 Note that we *look* inside a SAVE_EXPR only to determine how it was
14716 calculated; it is not safe for fold to do much of anything else with the
14717 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14718 at run time. For example, the latter example above *cannot* be implemented
14719 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14720 evaluation time of the original SAVE_EXPR is not necessarily the same at
14721 the time the new expression is evaluated. The only optimization of this
14722 sort that would be valid is changing
14724 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14726 divided by 8 to
14728 SAVE_EXPR (I) * SAVE_EXPR (J)
14730 (where the same SAVE_EXPR (J) is used in the original and the
14731 transformed version). */
14733 int
14734 multiple_of_p (tree type, const_tree top, const_tree bottom)
14736 if (operand_equal_p (top, bottom, 0))
14737 return 1;
14739 if (TREE_CODE (type) != INTEGER_TYPE)
14740 return 0;
14742 switch (TREE_CODE (top))
14744 case BIT_AND_EXPR:
14745 /* Bitwise and provides a power of two multiple. If the mask is
14746 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14747 if (!integer_pow2p (bottom))
14748 return 0;
14749 /* FALLTHRU */
14751 case MULT_EXPR:
14752 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14753 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14755 case PLUS_EXPR:
14756 case MINUS_EXPR:
14757 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14758 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14760 case LSHIFT_EXPR:
14761 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14763 tree op1, t1;
14765 op1 = TREE_OPERAND (top, 1);
14766 /* const_binop may not detect overflow correctly,
14767 so check for it explicitly here. */
14768 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14769 > TREE_INT_CST_LOW (op1)
14770 && TREE_INT_CST_HIGH (op1) == 0
14771 && 0 != (t1 = fold_convert (type,
14772 const_binop (LSHIFT_EXPR,
14773 size_one_node,
14774 op1, 0)))
14775 && !TREE_OVERFLOW (t1))
14776 return multiple_of_p (type, t1, bottom);
14778 return 0;
14780 case NOP_EXPR:
14781 /* Can't handle conversions from non-integral or wider integral type. */
14782 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14783 || (TYPE_PRECISION (type)
14784 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14785 return 0;
14787 /* ... fall through ... */
14789 case SAVE_EXPR:
14790 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14792 case INTEGER_CST:
14793 if (TREE_CODE (bottom) != INTEGER_CST
14794 || integer_zerop (bottom)
14795 || (TYPE_UNSIGNED (type)
14796 && (tree_int_cst_sgn (top) < 0
14797 || tree_int_cst_sgn (bottom) < 0)))
14798 return 0;
14799 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14800 top, bottom, 0));
14802 default:
14803 return 0;
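/* Illustrative sketch (EXAMPLE_MULTIPLE_OF_FOUR is hypothetical): for
   an integer-typed J, TOP = J * 8 is provably a multiple of 4, so the
   MULT_EXPR case above returns nonzero without knowing J's value.  */

static int
example_multiple_of_four (tree j)
{
  tree type = TREE_TYPE (j);
  tree top = build2 (MULT_EXPR, type, j, build_int_cst (type, 8));
  return multiple_of_p (type, top, build_int_cst (type, 4));
}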
14807 /* Return true if CODE or TYPE is known to be non-negative. */
14809 static bool
14810 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14812 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14813 && truth_value_p (code))
14814 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14815 have a signed:1 type (where the values are -1 and 0). */
14816 return true;
14817 return false;
14820 /* Return true if (CODE OP0) is known to be non-negative. If the return
14821 value is based on the assumption that signed overflow is undefined,
14822 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14823 *STRICT_OVERFLOW_P. */
14825 bool
14826 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14827 bool *strict_overflow_p)
14829 if (TYPE_UNSIGNED (type))
14830 return true;
14832 switch (code)
14834 case ABS_EXPR:
14835 /* We can't return 1 if flag_wrapv is set because
14836 ABS_EXPR<INT_MIN> = INT_MIN. */
14837 if (!INTEGRAL_TYPE_P (type))
14838 return true;
14839 if (TYPE_OVERFLOW_UNDEFINED (type))
14841 *strict_overflow_p = true;
14842 return true;
14844 break;
14846 case NON_LVALUE_EXPR:
14847 case FLOAT_EXPR:
14848 case FIX_TRUNC_EXPR:
14849 return tree_expr_nonnegative_warnv_p (op0,
14850 strict_overflow_p);
14852 case NOP_EXPR:
14854 tree inner_type = TREE_TYPE (op0);
14855 tree outer_type = type;
14857 if (TREE_CODE (outer_type) == REAL_TYPE)
14859 if (TREE_CODE (inner_type) == REAL_TYPE)
14860 return tree_expr_nonnegative_warnv_p (op0,
14861 strict_overflow_p);
14862 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14864 if (TYPE_UNSIGNED (inner_type))
14865 return true;
14866 return tree_expr_nonnegative_warnv_p (op0,
14867 strict_overflow_p);
14870 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14872 if (TREE_CODE (inner_type) == REAL_TYPE)
14873 return tree_expr_nonnegative_warnv_p (op0,
14874 strict_overflow_p);
14875 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14876 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14877 && TYPE_UNSIGNED (inner_type);
14880 break;
14882 default:
14883 return tree_simple_nonnegative_warnv_p (code, type);
14886 /* We don't know sign of `t', so be conservative and return false. */
14887 return false;
14890 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14891 value is based on the assumption that signed overflow is undefined,
14892 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14893 *STRICT_OVERFLOW_P. */
14895 bool
14896 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14897 tree op1, bool *strict_overflow_p)
14899 if (TYPE_UNSIGNED (type))
14900 return true;
14902 switch (code)
14904 case POINTER_PLUS_EXPR:
14905 case PLUS_EXPR:
14906 if (FLOAT_TYPE_P (type))
14907 return (tree_expr_nonnegative_warnv_p (op0,
14908 strict_overflow_p)
14909 && tree_expr_nonnegative_warnv_p (op1,
14910 strict_overflow_p));
14912 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14913 both unsigned and at least 2 bits shorter than the result. */
14914 if (TREE_CODE (type) == INTEGER_TYPE
14915 && TREE_CODE (op0) == NOP_EXPR
14916 && TREE_CODE (op1) == NOP_EXPR)
14918 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14919 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14920 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14921 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14923 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14924 TYPE_PRECISION (inner2)) + 1;
14925 return prec < TYPE_PRECISION (type);
14928 break;
14930 case MULT_EXPR:
14931 if (FLOAT_TYPE_P (type))
14933 /* x * x for floating point x is always non-negative. */
14934 if (operand_equal_p (op0, op1, 0))
14935 return true;
14936 return (tree_expr_nonnegative_warnv_p (op0,
14937 strict_overflow_p)
14938 && tree_expr_nonnegative_warnv_p (op1,
14939 strict_overflow_p));
14942 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14943 both unsigned and their combined width is narrower than the result. */
14944 if (TREE_CODE (type) == INTEGER_TYPE
14945 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14946 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14948 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14949 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14950 : TREE_TYPE (op0);
14951 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14952 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14953 : TREE_TYPE (op1);
14955 bool unsigned0 = TYPE_UNSIGNED (inner0);
14956 bool unsigned1 = TYPE_UNSIGNED (inner1);
14958 if (TREE_CODE (op0) == INTEGER_CST)
14959 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14961 if (TREE_CODE (op1) == INTEGER_CST)
14962 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14964 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14965 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14967 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14968 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14969 : TYPE_PRECISION (inner0);
14971 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14972 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14973 : TYPE_PRECISION (inner1);
14975 return precision0 + precision1 < TYPE_PRECISION (type);
14978 return false;
14980 case BIT_AND_EXPR:
14981 case MAX_EXPR:
14982 return (tree_expr_nonnegative_warnv_p (op0,
14983 strict_overflow_p)
14984 || tree_expr_nonnegative_warnv_p (op1,
14985 strict_overflow_p));
14987 case BIT_IOR_EXPR:
14988 case BIT_XOR_EXPR:
14989 case MIN_EXPR:
14990 case RDIV_EXPR:
14991 case TRUNC_DIV_EXPR:
14992 case CEIL_DIV_EXPR:
14993 case FLOOR_DIV_EXPR:
14994 case ROUND_DIV_EXPR:
14995 return (tree_expr_nonnegative_warnv_p (op0,
14996 strict_overflow_p)
14997 && tree_expr_nonnegative_warnv_p (op1,
14998 strict_overflow_p));
15000 case TRUNC_MOD_EXPR:
15001 case CEIL_MOD_EXPR:
15002 case FLOOR_MOD_EXPR:
15003 case ROUND_MOD_EXPR:
15004 return tree_expr_nonnegative_warnv_p (op0,
15005 strict_overflow_p);
15006 default:
15007 return tree_simple_nonnegative_warnv_p (code, type);
15010 /* We don't know sign of `t', so be conservative and return false. */
15011 return false;
15014 /* Return true if T is known to be non-negative. If the return
15015 value is based on the assumption that signed overflow is undefined,
15016 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15017 *STRICT_OVERFLOW_P. */
15019 bool
15020 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15022 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15023 return true;
15025 switch (TREE_CODE (t))
15027 case INTEGER_CST:
15028 return tree_int_cst_sgn (t) >= 0;
15030 case REAL_CST:
15031 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15033 case FIXED_CST:
15034 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15036 case COND_EXPR:
15037 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15038 strict_overflow_p)
15039 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15040 strict_overflow_p));
15041 default:
15042 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15043 TREE_TYPE (t));
15045 /* We don't know sign of `t', so be conservative and return false. */
15046 return false;
15049 /* Return true if T is known to be non-negative. If the return
15050 value is based on the assumption that signed overflow is undefined,
15051 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15052 *STRICT_OVERFLOW_P. */
15054 bool
15055 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15056 tree arg0, tree arg1, bool *strict_overflow_p)
15058 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15059 switch (DECL_FUNCTION_CODE (fndecl))
15061 CASE_FLT_FN (BUILT_IN_ACOS):
15062 CASE_FLT_FN (BUILT_IN_ACOSH):
15063 CASE_FLT_FN (BUILT_IN_CABS):
15064 CASE_FLT_FN (BUILT_IN_COSH):
15065 CASE_FLT_FN (BUILT_IN_ERFC):
15066 CASE_FLT_FN (BUILT_IN_EXP):
15067 CASE_FLT_FN (BUILT_IN_EXP10):
15068 CASE_FLT_FN (BUILT_IN_EXP2):
15069 CASE_FLT_FN (BUILT_IN_FABS):
15070 CASE_FLT_FN (BUILT_IN_FDIM):
15071 CASE_FLT_FN (BUILT_IN_HYPOT):
15072 CASE_FLT_FN (BUILT_IN_POW10):
15073 CASE_INT_FN (BUILT_IN_FFS):
15074 CASE_INT_FN (BUILT_IN_PARITY):
15075 CASE_INT_FN (BUILT_IN_POPCOUNT):
15076 case BUILT_IN_BSWAP32:
15077 case BUILT_IN_BSWAP64:
15078 /* Always true. */
15079 return true;
15081 CASE_FLT_FN (BUILT_IN_SQRT):
15082 /* sqrt(-0.0) is -0.0. */
15083 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15084 return true;
15085 return tree_expr_nonnegative_warnv_p (arg0,
15086 strict_overflow_p);
15088 CASE_FLT_FN (BUILT_IN_ASINH):
15089 CASE_FLT_FN (BUILT_IN_ATAN):
15090 CASE_FLT_FN (BUILT_IN_ATANH):
15091 CASE_FLT_FN (BUILT_IN_CBRT):
15092 CASE_FLT_FN (BUILT_IN_CEIL):
15093 CASE_FLT_FN (BUILT_IN_ERF):
15094 CASE_FLT_FN (BUILT_IN_EXPM1):
15095 CASE_FLT_FN (BUILT_IN_FLOOR):
15096 CASE_FLT_FN (BUILT_IN_FMOD):
15097 CASE_FLT_FN (BUILT_IN_FREXP):
15098 CASE_FLT_FN (BUILT_IN_LCEIL):
15099 CASE_FLT_FN (BUILT_IN_LDEXP):
15100 CASE_FLT_FN (BUILT_IN_LFLOOR):
15101 CASE_FLT_FN (BUILT_IN_LLCEIL):
15102 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15103 CASE_FLT_FN (BUILT_IN_LLRINT):
15104 CASE_FLT_FN (BUILT_IN_LLROUND):
15105 CASE_FLT_FN (BUILT_IN_LRINT):
15106 CASE_FLT_FN (BUILT_IN_LROUND):
15107 CASE_FLT_FN (BUILT_IN_MODF):
15108 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15109 CASE_FLT_FN (BUILT_IN_RINT):
15110 CASE_FLT_FN (BUILT_IN_ROUND):
15111 CASE_FLT_FN (BUILT_IN_SCALB):
15112 CASE_FLT_FN (BUILT_IN_SCALBLN):
15113 CASE_FLT_FN (BUILT_IN_SCALBN):
15114 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15115 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15116 CASE_FLT_FN (BUILT_IN_SINH):
15117 CASE_FLT_FN (BUILT_IN_TANH):
15118 CASE_FLT_FN (BUILT_IN_TRUNC):
15119 /* True if the 1st argument is nonnegative. */
15120 return tree_expr_nonnegative_warnv_p (arg0,
15121 strict_overflow_p);
15123 CASE_FLT_FN (BUILT_IN_FMAX):
15124 /* True if the 1st OR 2nd arguments are nonnegative. */
15125 return (tree_expr_nonnegative_warnv_p (arg0,
15126 strict_overflow_p)
15127 || (tree_expr_nonnegative_warnv_p (arg1,
15128 strict_overflow_p)));
15130 CASE_FLT_FN (BUILT_IN_FMIN):
15131 /* True if the 1st AND 2nd arguments are nonnegative. */
15132 return (tree_expr_nonnegative_warnv_p (arg0,
15133 strict_overflow_p)
15134 && (tree_expr_nonnegative_warnv_p (arg1,
15135 strict_overflow_p)));
15137 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15138 /* True if the 2nd argument is nonnegative. */
15139 return tree_expr_nonnegative_warnv_p (arg1,
15140 strict_overflow_p);
15142 CASE_FLT_FN (BUILT_IN_POWI):
15143 /* True if the 1st argument is nonnegative or the second
15144 argument is an even integer. */
15145 if (TREE_CODE (arg1) == INTEGER_CST
15146 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15147 return true;
15148 return tree_expr_nonnegative_warnv_p (arg0,
15149 strict_overflow_p);
15151 CASE_FLT_FN (BUILT_IN_POW):
15152 /* True if the 1st argument is nonnegative or the second
15153 argument is an even integer valued real. */
15154 if (TREE_CODE (arg1) == REAL_CST)
15156 REAL_VALUE_TYPE c;
15157 HOST_WIDE_INT n;
15159 c = TREE_REAL_CST (arg1);
15160 n = real_to_integer (&c);
15161 if ((n & 1) == 0)
15163 REAL_VALUE_TYPE cint;
15164 real_from_integer (&cint, VOIDmode, n,
15165 n < 0 ? -1 : 0, 0);
15166 if (real_identical (&c, &cint))
15167 return true;
15170 return tree_expr_nonnegative_warnv_p (arg0,
15171 strict_overflow_p);
15173 default:
15174 break;
15176 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15177 type);
15180 /* Return true if T is known to be non-negative. If the return
15181 value is based on the assumption that signed overflow is undefined,
15182 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15183 *STRICT_OVERFLOW_P. */
15185 bool
15186 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15188 enum tree_code code = TREE_CODE (t);
15189 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15190 return true;
15192 switch (code)
15194 case TARGET_EXPR:
15196 tree temp = TARGET_EXPR_SLOT (t);
15197 t = TARGET_EXPR_INITIAL (t);
15199 /* If the initializer is non-void, then it's a normal expression
15200 that will be assigned to the slot. */
15201 if (!VOID_TYPE_P (t))
15202 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15204 /* Otherwise, the initializer sets the slot in some way. One common
15205 way is an assignment statement at the end of the initializer. */
15206 while (1)
15208 if (TREE_CODE (t) == BIND_EXPR)
15209 t = expr_last (BIND_EXPR_BODY (t));
15210 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15211 || TREE_CODE (t) == TRY_CATCH_EXPR)
15212 t = expr_last (TREE_OPERAND (t, 0));
15213 else if (TREE_CODE (t) == STATEMENT_LIST)
15214 t = expr_last (t);
15215 else
15216 break;
15218 if (TREE_CODE (t) == MODIFY_EXPR
15219 && TREE_OPERAND (t, 0) == temp)
15220 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15221 strict_overflow_p);
15223 return false;
15226 case CALL_EXPR:
15228 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15229 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15231 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15232 get_callee_fndecl (t),
15233 arg0,
15234 arg1,
15235 strict_overflow_p);
15237 case COMPOUND_EXPR:
15238 case MODIFY_EXPR:
15239 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15240 strict_overflow_p);
15241 case BIND_EXPR:
15242 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15243 strict_overflow_p);
15244 case SAVE_EXPR:
15245 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15246 strict_overflow_p);
15248 default:
15249 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15250 TREE_TYPE (t));
15253 /* We don't know sign of `t', so be conservative and return false. */
15254 return false;
15257 /* Return true if T is known to be non-negative. If the return
15258 value is based on the assumption that signed overflow is undefined,
15259 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15260 *STRICT_OVERFLOW_P. */
15262 bool
15263 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15265 enum tree_code code;
15266 if (t == error_mark_node)
15267 return false;
15269 code = TREE_CODE (t);
15270 switch (TREE_CODE_CLASS (code))
15272 case tcc_binary:
15273 case tcc_comparison:
15274 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15275 TREE_TYPE (t),
15276 TREE_OPERAND (t, 0),
15277 TREE_OPERAND (t, 1),
15278 strict_overflow_p);
15280 case tcc_unary:
15281 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15282 TREE_TYPE (t),
15283 TREE_OPERAND (t, 0),
15284 strict_overflow_p);
15286 case tcc_constant:
15287 case tcc_declaration:
15288 case tcc_reference:
15289 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15291 default:
15292 break;
15295 switch (code)
15297 case TRUTH_AND_EXPR:
15298 case TRUTH_OR_EXPR:
15299 case TRUTH_XOR_EXPR:
15300 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15301 TREE_TYPE (t),
15302 TREE_OPERAND (t, 0),
15303 TREE_OPERAND (t, 1),
15304 strict_overflow_p);
15305 case TRUTH_NOT_EXPR:
15306 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15307 TREE_TYPE (t),
15308 TREE_OPERAND (t, 0),
15309 strict_overflow_p);
15311 case COND_EXPR:
15312 case CONSTRUCTOR:
15313 case OBJ_TYPE_REF:
15314 case ASSERT_EXPR:
15315 case ADDR_EXPR:
15316 case WITH_SIZE_EXPR:
15317 case SSA_NAME:
15318 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15320 default:
15321 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15325 /* Return true if `t' is known to be non-negative. Handle warnings
15326 about undefined signed overflow. */
15328 bool
15329 tree_expr_nonnegative_p (tree t)
15331 bool ret, strict_overflow_p;
15333 strict_overflow_p = false;
15334 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15335 if (strict_overflow_p)
15336 fold_overflow_warning (("assuming signed overflow does not occur when "
15337 "determining that expression is always "
15338 "non-negative"),
15339 WARN_STRICT_OVERFLOW_MISC);
15340 return ret;
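/* Illustrative sketch (EXAMPLE_SIMPLIFY_ABS is hypothetical): dropping
   a redundant ABS_EXPR when the operand is known non-negative, e.g. an
   unsigned value or x * x for floating point x.  */

static tree
example_simplify_abs (location_t loc, tree type, tree op0)
{
  if (tree_expr_nonnegative_p (op0))
    return fold_convert_loc (loc, type, op0);
  return fold_build1_loc (loc, ABS_EXPR, type, op0);
}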
15344 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15345 For floating point we further ensure that T is not denormal.
15346 Similar logic is present in nonzero_address in rtlanal.c.
15348 If the return value is based on the assumption that signed overflow
15349 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15350 change *STRICT_OVERFLOW_P. */
15352 bool
15353 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15354 bool *strict_overflow_p)
15356 switch (code)
15358 case ABS_EXPR:
15359 return tree_expr_nonzero_warnv_p (op0,
15360 strict_overflow_p);
15362 case NOP_EXPR:
15364 tree inner_type = TREE_TYPE (op0);
15365 tree outer_type = type;
15367 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15368 && tree_expr_nonzero_warnv_p (op0,
15369 strict_overflow_p));
15371 break;
15373 case NON_LVALUE_EXPR:
15374 return tree_expr_nonzero_warnv_p (op0,
15375 strict_overflow_p);
15377 default:
15378 break;
15381 return false;
15384 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15385 For floating point we further ensure that T is not denormal.
15386 Similar logic is present in nonzero_address in rtlanal.c.
15388 If the return value is based on the assumption that signed overflow
15389 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15390 change *STRICT_OVERFLOW_P. */
15392 bool
15393 tree_binary_nonzero_warnv_p (enum tree_code code,
15394 tree type,
15395 tree op0,
15396 tree op1, bool *strict_overflow_p)
15398 bool sub_strict_overflow_p;
15399 switch (code)
15401 case POINTER_PLUS_EXPR:
15402 case PLUS_EXPR:
15403 if (TYPE_OVERFLOW_UNDEFINED (type))
15405 /* In the presence of negative values it is hard
15406 to say anything. */
15407 sub_strict_overflow_p = false;
15408 if (!tree_expr_nonnegative_warnv_p (op0,
15409 &sub_strict_overflow_p)
15410 || !tree_expr_nonnegative_warnv_p (op1,
15411 &sub_strict_overflow_p))
15412 return false;
15413 /* One of the operands must be positive and the other non-negative. */
15414 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15415 overflows, on a twos-complement machine the sum of two
15416 nonnegative numbers can never be zero. */
15417 return (tree_expr_nonzero_warnv_p (op0,
15418 strict_overflow_p)
15419 || tree_expr_nonzero_warnv_p (op1,
15420 strict_overflow_p));
15422 break;
15424 case MULT_EXPR:
15425 if (TYPE_OVERFLOW_UNDEFINED (type))
15427 if (tree_expr_nonzero_warnv_p (op0,
15428 strict_overflow_p)
15429 && tree_expr_nonzero_warnv_p (op1,
15430 strict_overflow_p))
15432 *strict_overflow_p = true;
15433 return true;
15436 break;
15438 case MIN_EXPR:
15439 sub_strict_overflow_p = false;
15440 if (tree_expr_nonzero_warnv_p (op0,
15441 &sub_strict_overflow_p)
15442 && tree_expr_nonzero_warnv_p (op1,
15443 &sub_strict_overflow_p))
15445 if (sub_strict_overflow_p)
15446 *strict_overflow_p = true;
15448 break;
15450 case MAX_EXPR:
15451 sub_strict_overflow_p = false;
15452 if (tree_expr_nonzero_warnv_p (op0,
15453 &sub_strict_overflow_p))
15455 if (sub_strict_overflow_p)
15456 *strict_overflow_p = true;
15458 /* When both operands are nonzero, MAX must be too. */
15459 if (tree_expr_nonzero_warnv_p (op1,
15460 strict_overflow_p))
15461 return true;
15463 /* MAX where operand 0 is positive is positive. */
15464 return tree_expr_nonnegative_warnv_p (op0,
15465 strict_overflow_p);
15467 /* MAX where operand 1 is positive is positive. */
15468 else if (tree_expr_nonzero_warnv_p (op1,
15469 &sub_strict_overflow_p)
15470 && tree_expr_nonnegative_warnv_p (op1,
15471 &sub_strict_overflow_p))
15473 if (sub_strict_overflow_p)
15474 *strict_overflow_p = true;
15475 return true;
15477 break;
15479 case BIT_IOR_EXPR:
15480 return (tree_expr_nonzero_warnv_p (op1,
15481 strict_overflow_p)
15482 || tree_expr_nonzero_warnv_p (op0,
15483 strict_overflow_p));
15485 default:
15486 break;
15489 return false;
15492 /* Return true when T is an address and is known to be nonzero.
15493 For floating point we further ensure that T is not denormal.
15494 Similar logic is present in nonzero_address in rtlanal.c.
15496 If the return value is based on the assumption that signed overflow
15497 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15498 change *STRICT_OVERFLOW_P. */
15500 bool
15501 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15503 bool sub_strict_overflow_p;
15504 switch (TREE_CODE (t))
15506 case INTEGER_CST:
15507 return !integer_zerop (t);
15509 case ADDR_EXPR:
15511 tree base = get_base_address (TREE_OPERAND (t, 0));
15513 if (!base)
15514 return false;
15516 /* Weak declarations may link to NULL. Other things may also be NULL,
15517 so only assume a nonzero address under -fdelete-null-pointer-checks;
15518 variables allocated on the stack, however, are always nonzero. */
15519 if (DECL_P (base)
15520 && (flag_delete_null_pointer_checks
15521 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15522 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15524 /* Constants are never weak. */
15525 if (CONSTANT_CLASS_P (base))
15526 return true;
15528 return false;
15531 case COND_EXPR:
15532 sub_strict_overflow_p = false;
15533 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15534 &sub_strict_overflow_p)
15535 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15536 &sub_strict_overflow_p))
15538 if (sub_strict_overflow_p)
15539 *strict_overflow_p = true;
15540 return true;
15542 break;
15544 default:
15545 break;
15547 return false;
15550 /* Return true when T is an address and is known to be nonzero.
15551 For floating point we further ensure that T is not denormal.
15552 Similar logic is present in nonzero_address in rtlanal.c.
15554 If the return value is based on the assumption that signed overflow
15555 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15556 change *STRICT_OVERFLOW_P. */
15558 bool
15559 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15561 tree type = TREE_TYPE (t);
15562 enum tree_code code;
15564 /* Doing something useful for floating point would need more work. */
15565 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15566 return false;
15568 code = TREE_CODE (t);
15569 switch (TREE_CODE_CLASS (code))
15571 case tcc_unary:
15572 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15573 strict_overflow_p);
15574 case tcc_binary:
15575 case tcc_comparison:
15576 return tree_binary_nonzero_warnv_p (code, type,
15577 TREE_OPERAND (t, 0),
15578 TREE_OPERAND (t, 1),
15579 strict_overflow_p);
15580 case tcc_constant:
15581 case tcc_declaration:
15582 case tcc_reference:
15583 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15585 default:
15586 break;
15589 switch (code)
15591 case TRUTH_NOT_EXPR:
15592 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15593 strict_overflow_p);
15595 case TRUTH_AND_EXPR:
15596 case TRUTH_OR_EXPR:
15597 case TRUTH_XOR_EXPR:
15598 return tree_binary_nonzero_warnv_p (code, type,
15599 TREE_OPERAND (t, 0),
15600 TREE_OPERAND (t, 1),
15601 strict_overflow_p);
15603 case COND_EXPR:
15604 case CONSTRUCTOR:
15605 case OBJ_TYPE_REF:
15606 case ASSERT_EXPR:
15607 case ADDR_EXPR:
15608 case WITH_SIZE_EXPR:
15609 case SSA_NAME:
15610 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15612 case COMPOUND_EXPR:
15613 case MODIFY_EXPR:
15614 case BIND_EXPR:
15615 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15616 strict_overflow_p);
15618 case SAVE_EXPR:
15619 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15620 strict_overflow_p);
15622 case CALL_EXPR:
15623 return alloca_call_p (t);
15625 default:
15626 break;
15628 return false;
15631 /* Return true when T is an address and is known to be nonzero.
15632 Handle warnings about undefined signed overflow. */
15634 bool
15635 tree_expr_nonzero_p (tree t)
15637 bool ret, strict_overflow_p;
15639 strict_overflow_p = false;
15640 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15641 if (strict_overflow_p)
15642 fold_overflow_warning (("assuming signed overflow does not occur when "
15643 "determining that expression is always "
15644 "non-zero"),
15645 WARN_STRICT_OVERFLOW_MISC);
15646 return ret;
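/* Illustrative sketch (EXAMPLE_FOLD_NE_ZERO is hypothetical): the
   address of a non-weak decl is nonzero, so P != 0 can fold to true.  */

static tree
example_fold_ne_zero (tree type, tree p)
{
  if (tree_expr_nonzero_p (p))
    return constant_boolean_node (true, type);
  return NULL_TREE;
}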
15649 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15650 attempt to fold the expression to a constant without modifying TYPE,
15651 OP0 or OP1.
15653 If the expression could be simplified to a constant, then return
15654 the constant. If the expression would not be simplified to a
15655 constant, then return NULL_TREE. */
15657 tree
15658 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15660 tree tem = fold_binary (code, type, op0, op1);
15661 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
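/* Illustrative sketch (EXAMPLE_CONST_SUM is hypothetical): with A = 3
   and B = 4 this returns the INTEGER_CST 7; operands that do not
   reduce to a constant yield NULL_TREE.  */

static tree
example_const_sum (tree a, tree b)
{
  return fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (a), a, b);
}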
15664 /* Given the components of a unary expression CODE, TYPE and OP0,
15665 attempt to fold the expression to a constant without modifying
15666 TYPE or OP0.
15668 If the expression could be simplified to a constant, then return
15669 the constant. If the expression would not be simplified to a
15670 constant, then return NULL_TREE. */
15672 tree
15673 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15675 tree tem = fold_unary (code, type, op0);
15676 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15679 /* If EXP represents referencing an element in a constant string
15680 (either via pointer arithmetic or array indexing), return the
15681 tree representing the value accessed, otherwise return NULL. */
15683 tree
15684 fold_read_from_constant_string (tree exp)
15686 if ((TREE_CODE (exp) == INDIRECT_REF
15687 || TREE_CODE (exp) == ARRAY_REF)
15688 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15690 tree exp1 = TREE_OPERAND (exp, 0);
15691 tree index;
15692 tree string;
15693 location_t loc = EXPR_LOCATION (exp);
15695 if (TREE_CODE (exp) == INDIRECT_REF)
15696 string = string_constant (exp1, &index);
15697 else
15699 tree low_bound = array_ref_low_bound (exp);
15700 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15702 /* Optimize the special-case of a zero lower bound.
15704 We convert the low_bound to sizetype to avoid some problems
15705 with constant folding. (E.g. suppose the lower bound is 1,
15706 and its mode is QI. Without the conversion, (ARRAY
15707 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15708 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15709 if (! integer_zerop (low_bound))
15710 index = size_diffop_loc (loc, index,
15711 fold_convert_loc (loc, sizetype, low_bound));
15713 string = exp1;
15716 if (string
15717 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15718 && TREE_CODE (string) == STRING_CST
15719 && TREE_CODE (index) == INTEGER_CST
15720 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15721 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15722 == MODE_INT)
15723 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15724 return build_int_cst_type (TREE_TYPE (exp),
15725 (TREE_STRING_POINTER (string)
15726 [TREE_INT_CST_LOW (index)]));
15728 return NULL;
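/* Illustrative sketch (EXAMPLE_READ_STRING_ELT is hypothetical): for
   EXP representing "abc"[1] this returns the INTEGER_CST for 'b'; any
   other EXP is passed through unchanged.  */

static tree
example_read_string_elt (tree exp)
{
  tree c = fold_read_from_constant_string (exp);
  return c ? c : exp;
}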
15731 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15732 an integer constant, real, or fixed-point constant.
15734 TYPE is the type of the result. */
15736 static tree
15737 fold_negate_const (tree arg0, tree type)
15739 tree t = NULL_TREE;
15741 switch (TREE_CODE (arg0))
15743 case INTEGER_CST:
15745 unsigned HOST_WIDE_INT low;
15746 HOST_WIDE_INT high;
15747 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15748 TREE_INT_CST_HIGH (arg0),
15749 &low, &high);
15750 t = force_fit_type_double (type, low, high, 1,
15751 (overflow | TREE_OVERFLOW (arg0))
15752 && !TYPE_UNSIGNED (type));
15753 break;
15756 case REAL_CST:
15757 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15758 break;
15760 case FIXED_CST:
15762 FIXED_VALUE_TYPE f;
15763 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15764 &(TREE_FIXED_CST (arg0)), NULL,
15765 TYPE_SATURATING (type));
15766 t = build_fixed (type, f);
15767 /* Propagate overflow flags. */
15768 if (overflow_p | TREE_OVERFLOW (arg0))
15769 TREE_OVERFLOW (t) = 1;
15770 break;
15773 default:
15774 gcc_unreachable ();
15777 return t;
15780 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15781 an integer constant or real constant.
15783 TYPE is the type of the result. */
15785 tree
15786 fold_abs_const (tree arg0, tree type)
15788 tree t = NULL_TREE;
15790 switch (TREE_CODE (arg0))
15792 case INTEGER_CST:
15793 /* If the value is unsigned, then the absolute value is
15794 the same as the ordinary value. */
15795 if (TYPE_UNSIGNED (type))
15796 t = arg0;
15797 /* Similarly, if the value is non-negative. */
15798 else if (INT_CST_LT (integer_minus_one_node, arg0))
15799 t = arg0;
15800 /* If the value is negative, then the absolute value is
15801 its negation. */
15802 else
15804 unsigned HOST_WIDE_INT low;
15805 HOST_WIDE_INT high;
15806 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15807 TREE_INT_CST_HIGH (arg0),
15808 &low, &high);
15809 t = force_fit_type_double (type, low, high, -1,
15810 overflow | TREE_OVERFLOW (arg0));
15812 break;
15814 case REAL_CST:
15815 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15816 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15817 else
15818 t = arg0;
15819 break;
15821 default:
15822 gcc_unreachable ();
15825 return t;
15828 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15829 constant. TYPE is the type of the result. */
15831 static tree
15832 fold_not_const (tree arg0, tree type)
15834 tree t = NULL_TREE;
15836 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15838 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15839 ~TREE_INT_CST_HIGH (arg0), 0,
15840 TREE_OVERFLOW (arg0));
15842 return t;
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

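/* Illustrative sketch (not part of fold-const.c): the canonicalization
   above reduces all six ordinary comparisons to LT and EQ.  The same
   identities in plain C, valid for integer (non-NaN) operands:  */
static int lt_sketch (long a, long b) { return a < b; }
static int eq_sketch (long a, long b) { return a == b; }
static int gt_sketch (long a, long b) { return lt_sketch (b, a); }   /* swap          */
static int ge_sketch (long a, long b) { return !lt_sketch (a, b); }  /* invert        */
static int le_sketch (long a, long b) { return !lt_sketch (b, a); }  /* swap + invert */
static int ne_sketch (long a, long b) { return !eq_sketch (a, b); }  /* invert        */
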
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the MODIFY_EXPR inside it, has
     side effects.  If either has none, we don't need to wrap the
     expression in a cleanup point expression.  Note we don't check the
     left-hand side of the MODIFY_EXPR because it should always be a
     return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

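/* Illustrative example (hypothetical source, not part of this file):
   the RETURN_EXPR shape inspected above arises from code like the
   function below, where the return value is a MODIFY_EXPR whose LHS is
   the RESULT_DECL (never side-effecting) and whose RHS is the call.  */
extern int g (void);

static int
cleanup_point_example (void)
{
  return g ();  /* RHS g () has side effects, so EXPR stays wrapped.  */
}
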
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	  SET_EXPR_LOCATION (op0, loc);
	  return op0;
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);

	  /* The index must select an element inside the vector; note the
	     strict comparison, since an offset of exactly
	     TYPE_VECTOR_SUBPARTS elements is already out of range.  */
	  if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
	    return fold_build3_loc (loc,
				    BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
				    part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1_loc (loc, IMAGPART_EXPR, type,
				    TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}

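/* Illustrative examples (hypothetical C, not part of this file) of the
   source forms the simplifications above correspond to:  */
static double fooarray[4];
static _Complex double complexfoo;

static double
indirect_ref_examples (void)
{
  double a = *(double *) &fooarray;        /* -> fooarray[0]         */
  double r = *(double *) &complexfoo;      /* -> __real__ complexfoo */
  double i = ((double *) &complexfoo)[1];  /* -> __imag__ complexfoo */
  return a + r + i;
}
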
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}

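/* Illustrative example (hypothetical, not part of this file): when a
   result is discarded, only side-effecting operands need to survive.
   In the statement below, the addition and the read of X are dropped
   and only the call remains; fold_ignored_result performs the
   analogous stripping on trees.  */
extern int f (void);

static void
ignored_result_example (int x)
{
  (void) (x + f ());  /* equivalent, for effects, to plain f ();  */
}
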
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only perform this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

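/* A minimal sketch (not part of fold-const.c) of the power-of-two path
   above: the non-constant case computes (value + divisor - 1) & -divisor,
   and -divisor equals ~(divisor - 1) for a power of two.  */
static unsigned long
round_up_pow2_sketch (unsigned long value, unsigned long divisor)
{
  /* Assumes divisor is a nonzero power of two.  */
  return (value + divisor - 1) & ~(divisor - 1);
}
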
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only perform this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

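/* Likewise, a minimal sketch (not part of fold-const.c) of rounding
   down: masking with ~(divisor - 1) (the BIT_AND_EXPR with -divisor
   built above) clears the low-order bits.  */
static unsigned long
round_down_pow2_sketch (unsigned long value, unsigned long divisor)
{
  /* Assumes divisor is a nonzero power of two.  */
  return value & ~(divisor - 1);
}
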
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

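/* Illustrative example (hypothetical, not part of this file): for the
   address &p->second below, the decomposition yields P itself as the
   core, the field's constant position (in bits) in *PBITPOS, and a
   NULL variable offset in *POFFSET.  */
struct pair_sketch { int first, second; };

static int *
core_offset_example (struct pair_sketch *p)
{
  return &p->second;  /* core P, *pbitpos == CHAR_BIT * sizeof (int) */
}
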
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

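/* Illustrative example (hypothetical, not part of this file): for two
   addresses into the same array, the cores match and the byte
   difference is constant, so ptr_difference_const would store
   2 * sizeof (int) into *DIFF for the expression below.  */
static int a_sketch[8];

static long
ptr_diff_example (void)
{
  return (char *) &a_sketch[3] - (char *) &a_sketch[1];
}
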
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}

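/* Illustrative example (hypothetical, not part of this file): with the
   result's sign insignificant (here it feeds fabs), the inner negation
   can be stripped because sin is odd; that is exactly the fact
   negate_mathfn_p encodes for the CALL_EXPR case above.  */
#include <math.h>

static double
strip_sign_ops_example (double x, double y)
{
  /* Computes the same value as fabs (sin (x) * y).  */
  return fabs (sin (-x) * y);
}
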